Initial checkin of Kafka to Apache SVN. This corresponds to 709afe4ec7, except that git-specific files have been removed and code has been put into trunk/branches/site/etc. This is just a copy of master; branches and history are not being converted, since we can't find a good tool for it.

git-svn-id: https://svn.apache.org/repos/asf/incubator/kafka/trunk@1152970 13f79535-47bb-0310-9956-ffa450edef68
Edward Jay Kreps 2011-08-01 23:41:24 +00:00
commit 642da2f28c
433 changed files with 72944 additions and 0 deletions

10
CONTRIBUTORS Normal file

@@ -0,0 +1,10 @@
Jay Kreps
Rui Wang
Jun Rao
Neha Narkhede
Fatih Emekci
Lin Guo
Shirshanka Das
Roshan Sumbaly
Sam Shah
Chris Burroughs

202
LICENSE Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2010 LinkedIn
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

64
NOTICE Normal file

@@ -0,0 +1,64 @@
Kafka
This product includes software developed by the Apache Software Foundation (http://www.apache.org/).
This product includes jopt-simple, a library for parsing command line options (http://jopt-simple.sourceforge.net/).
This product includes junit, developed by junit.org.
This product includes zkclient, developed by Stefan Groschupf (http://github.com/sgroschupf/zkclient)
This product includes joda-time, developed by joda.org (http://joda-time.sourceforge.net)
This product includes the scala runtime and compiler (www.scala-lang.org) developed by EPFL, which includes the following license:
This product includes zookeeper, a Hadoop sub-project (http://hadoop.apache.org/zookeeper)
This product includes log4j, an Apache project (http://logging.apache.org/log4j)
This product includes easymock, developed by easymock.org (http://easymock.org)
This product includes objenesis, developed by Joe Walnes, Henri Tremblay, Leonardo Mesquita (http://code.google.com/p/objenesis)
This product includes cglib, developed by sourceforge.net (http://cglib.sourceforge.net)
This product includes asm, developed by OW2 consortium (http://asm.ow2.org)
-----------------------------------------------------------------------
SCALA LICENSE
Copyright (c) 2002-2010 EPFL, Lausanne, unless otherwise specified.
All rights reserved.
This software was developed by the Programming Methods Laboratory of the
Swiss Federal Institute of Technology (EPFL), Lausanne, Switzerland.
Permission to use, copy, modify, and distribute this software in source
or binary form for any purpose with or without fee is hereby granted,
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the EPFL nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
-----------------------------------------------------------------------

55
README.md Normal file

@@ -0,0 +1,55 @@
# Kafka is a distributed publish/subscribe messaging system #
It is designed to support the following:
* Persistent messaging with O(1) disk structures that provide constant time performance even with many TB of stored messages.
* High-throughput: even with very modest hardware Kafka can support hundreds of thousands of messages per second.
* Explicit support for partitioning messages over Kafka servers and distributing consumption over a cluster of consumer machines while maintaining per-partition ordering semantics.
* Support for parallel data load into Hadoop.
Kafka is aimed at providing a publish-subscribe solution that can handle all activity stream data and processing on a consumer-scale web site. This kind of activity (page views, searches, and other user actions) is a key ingredient in many of the social features on the modern web. This data is typically handled by "logging" and ad hoc log aggregation solutions due to the throughput requirements. Such ad hoc solutions are viable for feeding logging data into an offline analysis system like Hadoop, but are very limiting for building real-time processing. Kafka aims to unify offline and online processing by providing a mechanism for parallel load into Hadoop as well as the ability to partition real-time consumption over a cluster of machines.
See our [web site](http://sna-projects.com/kafka) for more details on the project.
## Contribution ##
Kafka is a new project, and we are interested in building the community; we would welcome any thoughts or patches. You can reach us [here](http://groups.google.com/group/kafka-dev).
To get the Kafka code:
git clone git@github.com:kafka-dev/kafka.git kafka
To build:
1. ./sbt
2. update - This downloads all the dependencies for all sub projects
3. package - This will compile all sub projects and create all the jars
Here are some useful sbt commands, to be executed at the sbt command prompt (./sbt):
actions : Lists all the sbt commands and their descriptions
clean : Deletes all generated files (the target directory).
clean-cache : Deletes the cache of artifacts downloaded for automatically managed dependencies.
clean-lib : Deletes the managed library directory.
compile : Compiles all the sub projects, but does not create the jars
test : Run all unit tests in all sub projects
release-zip : Create all the jars, run unit tests and create a deployable release zip
package-all : Creates jars for src, test, docs, etc.
projects : List all the sub projects
project sub_project_name : Switch to a particular sub-project. For example, to switch to the core kafka code, use "project core-kafka"
The following commands can be run only on a particular sub project:
test-only package.test.TestName : Runs only the specified test in the current sub project
run : Provides options to run any of the classes that have a main method. For example, you can switch to project java-examples, and run the examples there by executing "project java-examples" followed by "run"


@@ -0,0 +1,7 @@
log4j.rootLogger=INFO, stderr
log4j.appender.stderr=org.apache.log4j.ConsoleAppender
log4j.appender.stderr.target=System.err
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n

5
bin/kafka-console-consumer.sh Executable file

@@ -0,0 +1,5 @@
#!/bin/bash
base_dir=$(dirname $0)
export KAFKA_OPTS="-Xmx512M -server -Dcom.sun.management.jmxremote -Dlog4j.configuration=file:$base_dir/kafka-console-consumer-log4j.properties"
$base_dir/kafka-run-class.sh kafka.consumer.ConsoleConsumer $@


@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.ConsumerPerformance $@

3
bin/kafka-consumer-shell.sh Executable file

@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.ConsumerShell $@


@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.ProducerPerformance $@

3
bin/kafka-producer-shell.sh Executable file

@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.ProducerShell $@


@@ -0,0 +1,5 @@
#!/bin/bash
base_dir=$(dirname $0)
export KAFKA_OPTS="-Xmx512M -server -Dcom.sun.management.jmxremote -Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
$base_dir/kafka-run-class.sh kafka.tools.ReplayLogProducer $@

47
bin/kafka-run-class.sh Executable file

@@ -0,0 +1,47 @@
#!/bin/bash
if [ $# -lt 1 ];
then
echo "USAGE: $0 classname [opts]"
exit 1
fi
base_dir=$(dirname $0)/..
for file in $base_dir/project/boot/scala-2.8.0/lib/*.jar;
do
CLASSPATH=$CLASSPATH:$file
done
for file in $base_dir/core/target/scala_2.8.0/*.jar;
do
CLASSPATH=$CLASSPATH:$file
done
for file in $base_dir/core/lib/*.jar;
do
CLASSPATH=$CLASSPATH:$file
done
for file in $base_dir/core/lib_managed/scala_2.8.0/compile/*.jar;
do
if [ ${file##*/} != "sbt-launch.jar" ]; then
CLASSPATH=$CLASSPATH:$file
fi
done
if [ -z "$KAFKA_JMX_OPTS" ]; then
KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
fi
if [ -z "$KAFKA_OPTS" ]; then
KAFKA_OPTS="-Xmx512M -server -Dlog4j.configuration=file:$base_dir/config/log4j.properties"
fi
if [ $JMX_PORT ]; then
KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT "
fi
if [ -z "$JAVA_HOME" ]; then
JAVA="java"
else
JAVA="$JAVA_HOME/bin/java"
fi
$JAVA $KAFKA_OPTS $KAFKA_JMX_OPTS -cp $CLASSPATH $@

11
bin/kafka-server-start.sh Executable file

@@ -0,0 +1,11 @@
#!/bin/bash
if [ $# -lt 1 ];
then
echo "USAGE: $0 server.properties [consumer.properties]"
exit 1
fi
export JMX_PORT="9999"
$(dirname $0)/kafka-run-class.sh kafka.Kafka $@

2
bin/kafka-server-stop.sh Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
ps ax | grep -i 'kafka.Kafka' | grep -v grep | awk '{print $1}' | xargs kill -SIGINT


@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.SimpleConsumerPerformance $@


@@ -0,0 +1,3 @@
#!/bin/bash
$(dirname $0)/kafka-run-class.sh kafka.tools.SimpleConsumerShell $@

9
bin/zookeeper-server-start.sh Executable file

@@ -0,0 +1,9 @@
#!/bin/bash
if [ $# -ne 1 ];
then
echo "USAGE: $0 zookeeper.properties"
exit 1
fi
$(dirname $0)/kafka-run-class.sh org.apache.zookeeper.server.quorum.QuorumPeerMain $@

2
bin/zookeeper-server-stop.sh Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
ps ax | grep -i 'zookeeper' | grep -v grep | awk '{print $1}' | xargs kill -SIGINT

2
clients/clojure/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
lib
classes

202
clients/clojure/LICENSE Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 LinkedIn
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

50
clients/clojure/README.md Normal file

@@ -0,0 +1,50 @@
# kafka-clj
kafka-clj provides a producer and a consumer that support a basic fetch API as well as a managed sequence interface. Multifetch is not supported yet.
## Quick Start
Download and start [Kafka](http://sna-projects.com/kafka/quickstart.php).
Pull dependencies with [Leiningen](https://github.com/technomancy/leiningen):
$ lein deps
And run the example:
$ lein run-example
## Usage
### Sending messages
(with-open [p (producer "localhost" 9092)]
(produce p "test" 0 "Message 1")
(produce p "test" 0 ["Message 2" "Message 3"]))
### Simple consumer
(with-open [c (consumer "localhost" 9092)]
(let [offs (offsets c "test" 0 -1 10)]
(consume c "test" 0 (last offs) 1000000)))
### Consumer sequence
(with-open [c (consumer "localhost" 9092)]
(doseq [m (consume-seq c "test" 0 {:blocking true})]
(println m)))
The following options are supported (see the example after this list):
* :blocking _boolean_, default false; the sequence returns nil the first time a fetch returns no new messages. If set to true, the sequence retries the fetch :repeat-count times, waiting :repeat-timeout milliseconds between attempts.
* :repeat-count _int_ number of attempts to fetch new messages before terminating, default 10.
* :repeat-timeout _int_ wait time in milliseconds between fetch attempts, default 1000.
* :offset _long_ initialized to highest offset if not provided.
* :max-size _int_ max result message size, default 1000000.
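For example, a blocking consumer with tighter retry settings could be created as follows (a sketch based on the options above; the host, port, topic name, and option values are illustrative):
(with-open [c (consumer "localhost" 9092)]
  (doseq [m (consume-seq c "test" 0 {:blocking true        ; retry instead of returning nil
                                     :repeat-count 20      ; up to 20 fetch attempts
                                     :repeat-timeout 500   ; wait 500 ms between attempts
                                     :max-size 500000})]   ; cap fetch size at 500000 bytes
    (println m)))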
### Serialization
Load the namespace _kafka.print_ for basic print-dup/read-string serialization or _kafka.serializable_ for Java object serialization. For custom serialization, implement the Pack and Unpack protocols, as sketched below.
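As a minimal sketch of a custom codec (the namespace name is hypothetical; it is modeled on kafka.print and assumes the Pack/Unpack protocols and Message type from kafka.types, both included later in this commit):
(ns kafka.string-codec ; hypothetical example namespace, not part of this commit
  (:use kafka.types)
  (:import (kafka.types Message)))

; Pack a String directly as its UTF-8 bytes.
(extend-type String
  Pack
  (pack [this] (Message. (.getBytes this "UTF-8"))))

; Unpack a Message back into a UTF-8 String.
(extend-type Message
  Unpack
  (unpack [this] (String. ^bytes (.message this) "UTF-8")))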
Questions? Email adam.smyczek \_at\_ gmail.com.


@@ -0,0 +1,10 @@
(ns leiningen.run-example
(:use [leiningen.compile :only (eval-in-project)]))
(defn run-example
[project & args]
(eval-in-project project
`(do
(require 'kafka.example)
(kafka.example/run))))


@@ -0,0 +1,13 @@
(defproject kafka-clj "0.1-SNAPSHOT"
:description "Kafka client for Clojure."
:url "http://sna-projects.com/kafka/"
:dependencies [[org.clojure/clojure "1.2.0"]
[org.clojure/clojure-contrib "1.2.0"]
[log4j "1.2.15" :exclusions [javax.mail/mail
javax.jms/jms
com.sun.jdmk/jmxtools
com.sun.jmx/jmxri]]]
:disable-deps-clean false
:warn-on-reflection true
:source-path "src"
:test-path "test")


@@ -0,0 +1,5 @@
log4j.rootLogger=INFO, A1
log4j.appender.A1=org.apache.log4j.ConsoleAppender
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern= %-5p %c - %m%n


@@ -0,0 +1,175 @@
(ns #^{:doc "Wrapper around ByteBuffer,
provides a DSL to model byte messages."}
kafka.buffer
(:import (java.nio ByteBuffer)
(java.nio.channels SocketChannel)))
(def #^{:doc "Buffer stack bind in with-buffer."}
*buf* [])
(def #^{:doc "Number of attempts to read a complete buffer from channel."}
*channel-read-count* 5)
;
; Main buffer functions
;
(defn buffer
"Creates a new ByteBuffer of argument size."
[^int size]
(ByteBuffer/allocate size))
(defn ^ByteBuffer top
"Returns top buffer from *buf* stack."
[]
(peek *buf*))
(defn flip
[]
(.flip (top)))
(defn rewind
[]
(.rewind (top)))
(defn clear
[]
(.clear (top)))
(defn has-remaining
[]
(.hasRemaining (top)))
;
; Write to buffer
;
(defprotocol Put
"Put protocol defines a generic buffer put method."
(put [this]))
(extend-type Byte
Put
(put [this] (.put (top) this)))
(extend-type Integer
Put
(put [this] (.putInt (top) this)))
(extend-type Short
Put
(put [this] (.putShort (top) this)))
(extend-type Long
Put
(put [this] (.putLong (top) this)))
(extend-type String
Put
(put [this] (.put (top) (.getBytes this "UTF-8"))))
(extend-type (class (byte-array 0))
Put
(put [this] (.put (top) ^bytes this)))
(extend-type clojure.lang.IPersistentCollection
Put
(put [this] (doseq [e this] (put e))))
(defmacro length-encoded
[type & body]
`(with-buffer (.slice (top))
(put (~type 0))
(let [^ByteBuffer this# (top)
^ByteBuffer parent# (peek (pop *buf*))
type-size# (.position this#)]
~@body
(let [size# (.position this#)]
(.rewind this#)
(put (~type (- size# type-size#)))
(.position parent# (+ (.position parent#) size#))))))
(defmacro with-put
[size f & body]
`(with-buffer (.slice (top))
(put (byte-array ~size))
~@body
(let [^ByteBuffer this# (top)
^ByteBuffer parent# (peek (pop *buf*))
pos# (.position this#)
ba# (byte-array (- pos# ~size))]
(doto this# (.rewind) (.get (byte-array ~size)) (.get ba#))
(.rewind this#)
(put (~f ba#))
(.position parent# (+ (.position parent#) pos#)))))
;
; Read from buffer
;
(defn get-byte
[]
(.get (top)))
(defn get-short
[]
(.getShort (top)))
(defn get-int
[]
(.getInt (top)))
(defn get-long
[]
(.getLong (top)))
(defn get-array
"Reads byte array of argument length from buffer."
[^int length]
(let [ba (byte-array length)]
(.get (top) ba)
ba))
(defn get-string
"Reads string of argument length from buffer."
[^int length]
(let [ba (byte-array length)]
(.get (top) ba)
(String. ba "UTF-8")))
;
; Util functions and macros
;
(defmacro with-buffer
"Evaluates body in the context of the buffer."
[buffer & body]
`(binding [*buf* (conj *buf* ~buffer)]
~@body))
(defn read-from
"Reads from channel to the underlying top buffer.
Throws ConnectException if channel is closed."
[^SocketChannel channel]
(let [size (.read channel (top))]
(if (< size 0)
(throw (java.net.ConnectException. "Channel closed?"))
size)))
(defn read-completely-from
"Read the complete top buffer from the channel."
[^SocketChannel channel]
(loop [t *channel-read-count* size 0]
(let [s (read-from channel)]
(cond
(< t 0)
(throw (Exception. "Unable to read complete buffer from channel."))
(has-remaining)
(recur (dec t) (+ size s))
:else size))))
(defn write-to
"Writes underlying top buffer to channel."
[^SocketChannel channel]
(.write channel (top)))


@@ -0,0 +1,38 @@
(ns #^{:doc "Producer/Consumer example."}
kafka.example
(:use (clojure.contrib logging)
(kafka types kafka print)))
(defmacro thread
"Executes body in a thread, logs exceptions."
[ & body]
`(future
(try
~@body
(catch Exception e#
(error "Exception." e#)))))
(defn start-consumer
[]
(thread
(with-open [c (consumer "localhost" 9092)]
(doseq [m (consume-seq c "test" 0 {:blocking true})]
(println "Consumed <-- " m)))
(println "Finished consuming.")))
(defn start-producer
[]
(thread
(with-open [p (producer "localhost" 9092)]
(doseq [i (range 1 20)]
(let [m (str "Message " i)]
(produce p "test" 0 m)
(println "Produced --> " m)
(Thread/sleep 1000))))
(println "Finished producing.")))
(defn run
[]
(start-consumer)
(start-producer))


@@ -0,0 +1,267 @@
(ns #^{:doc "Core kafka-clj module,
provides producer and consumer factories."}
kafka.kafka
(:use (kafka types buffer)
(clojure.contrib logging))
(:import (kafka.types Message)
(java.nio.channels SocketChannel)
(java.net Socket InetSocketAddress)
(java.util.zip CRC32)))
;
; Utils
;
(defn- crc32-int
"CRC for byte array."
[^bytes ba]
(let [crc (doto (CRC32.) (.update ba))
lv (.getValue crc)]
(.intValue (bit-and lv 0xffffffff))))
(defn- new-channel
"Create and setup a new channel for a host name, port and options.
Supported options:
:receive-buffer-size - receive socket buffer size, default 65536.
:send-buffer-size - send socket buffer size, default 65536.
:socket-timeout - socket timeout."
[^String host ^Integer port opts]
(let [receive-buf-size (or (:receive-buffer-size opts) 65536)
send-buf-size (or (:send-buffer-size opts) 65536)
so-timeout (or (:socket-timeout opts) 60000)
ch (SocketChannel/open)]
(doto (.socket ch)
(.setReceiveBufferSize receive-buf-size)
(.setSendBufferSize send-buf-size)
(.setSoTimeout so-timeout))
(doto ch
(.configureBlocking true)
(.connect (InetSocketAddress. host port)))))
(defn- close-channel
"Close the channel."
[^SocketChannel channel]
(.close channel)
(.close (.socket channel)))
(defn- response-size
"Read first four bytes from channel as an integer."
[channel]
(with-buffer (buffer 4)
(read-completely-from channel)
(flip)
(get-int)))
(defmacro with-error-code
"Convenience response error code check."
[request & body]
`(let [error-code# (get-short)] ; error code
(if (not= error-code# 0)
(error (str "Request " ~request " returned error code: " error-code# "."))
~@body)))
;
; Producer
;
(defn- send-message
"Send messages."
[channel topic partition messages opts]
(let [size (or (:send-buffer-size opts) 65536)]
(with-buffer (buffer size)
(length-encoded int ; request size
(put (short 0)) ; request type
(length-encoded short ; topic size
(put topic)) ; topic
(put (int partition)) ; partition
(length-encoded int ; messages size
(doseq [m messages]
(let [^Message pm (pack m)]
(length-encoded int ; message size
(put (byte 0)) ; magic
(with-put 4 crc32-int ; crc
(put (.message pm)))))))) ; message
(flip)
(write-to channel))))
(defn producer
"Producer factory. See new-channel for list of supported options."
[host port & [opts]]
(let [channel (new-channel host port opts)]
(reify Producer
(produce [this topic partition messages]
(let [msg (if (sequential? messages) messages [messages])]
(send-message channel topic partition msg opts)))
(close [this]
(close-channel channel)))))
;
; Consumer
;
; Offset
(defn- offset-fetch-request
"Fetch offsets request."
[channel topic partition time max-offsets]
(let [size (+ 4 2 2 (count topic) 4 8 4)]
(with-buffer (buffer size)
(length-encoded int ; request size
(put (short 4)) ; request type
(length-encoded short ; topic size
(put topic)) ; topic
(put (int partition)) ; partition
(put (long time)) ; time
(put (int max-offsets))) ; max-offsets
(flip)
(write-to channel))))
(defn- fetch-offsets
"Fetch offsets as an integer sequence."
[channel topic partition time max-offsets]
(offset-fetch-request channel topic partition time max-offsets)
(let [rsp-size (response-size channel)]
(with-buffer (buffer rsp-size)
(read-completely-from channel)
(flip)
(with-error-code "Fetch-Offsets"
(loop [c (get-int) res []]
(if (> c 0)
(recur (dec c) (conj res (get-long)))
(doall res)))))))
; Messages
(defn- message-fetch-request
"Fetch messages request."
[channel topic partition offset max-size]
(let [size (+ 4 2 2 (count topic) 4 8 4)]
(with-buffer (buffer size)
(length-encoded int ; request size
(put (short 1)) ; request type
(length-encoded short ; topic size
(put topic)) ; topic
(put (int partition)) ; partition
(put (long offset)) ; offset
(put (int max-size))) ; max size
(flip)
(write-to channel))))
(defn- read-response
"Read response from buffer. Returns a pair [new offset, messages sequence]."
[offset]
(with-error-code "Fetch-Messages"
(loop [off offset msg []]
(if (has-remaining)
(let [size (get-int) ; message size
magic (get-byte) ; magic
crc (get-int) ; crc
message (get-array (- size 5))]
(recur (+ off size 4) (conj msg (unpack (Message. message)))))
[off (doall msg)]))))
(defn- fetch-messages
"Message fetch, returns a pair [new offset, messages sequence]."
[channel topic partition offset max-size]
(message-fetch-request channel topic partition offset max-size)
(let [rsp-size (response-size channel)]
(with-buffer (buffer rsp-size)
(read-completely-from channel)
(flip)
(read-response offset))))
; Consumer sequence
(defn- seq-fetch
"Non-blocking fetch function used by consumer sequence."
[channel topic partition opts]
(let [max-size (or (:max-size opts) 1000000)]
(fn [offset]
(fetch-messages channel topic partition offset max-size))))
(defn- blocking-seq-fetch
"Blocking fetch function used by consumer sequence."
[channel topic partition opts]
(let [repeat-count (or (:repeat-count opts) 10)
repeat-timeout (or (:repeat-timeout opts) 1000)
max-size (or (:max-size opts) 1000000)]
(fn [offset]
(loop [c repeat-count]
(if (> c 0)
(let [rs (fetch-messages channel topic partition offset max-size)]
(if (or (nil? rs) (= offset (first rs)))
(do
(Thread/sleep repeat-timeout)
(recur (dec c)))
(doall rs)))
(debug "Stopping blocking seq fetch."))))))
(defn- fetch-queue
[offset queue fetch-fn]
(if (empty? @queue)
(let [[new-offset msg] (fetch-fn @offset)]
(when new-offset
(debug (str "Fetched " (count msg) " messages:"))
(debug (str "New offset " new-offset "."))
(swap! queue #(reduce conj % (reverse msg)))
(reset! offset new-offset)))))
(defn- consumer-seq
"Sequence constructor."
[offset fetch-fn]
(let [offset (atom offset)
queue (atom (seq []))]
(reify
clojure.lang.IPersistentCollection
(seq [this] this)
(cons [this _] (throw (Exception. "cons not supported for consumer sequence.")))
(empty [this] nil)
(equiv [this o]
(fatal "Implement equiv for consumer seq!")
false)
clojure.lang.ISeq
(first [this]
(fetch-queue offset queue fetch-fn)
(first @queue))
(next [this]
(swap! queue rest)
(fetch-queue offset queue fetch-fn)
(if (not (empty? @queue)) this))
(more [this]
(swap! queue rest)
(fetch-queue offset queue fetch-fn)
(if (empty? @queue) (empty) this))
Object
(toString [this]
(str "ConsumerQueue")))))
; Consumer factory
(defn consumer
"Consumer factory. See new-channel for list of supported options."
[host port & [opts]]
(let [channel (new-channel host port opts)]
(reify Consumer
(consume [this topic partition offset max-size]
(fetch-messages channel topic partition offset max-size))
(offsets [this topic partition time max-offsets]
(fetch-offsets channel topic partition time max-offsets))
(consume-seq [this topic partition]
(let [[offset] (fetch-offsets channel topic partition -1 1)]
(debug (str "Initializing last offset to " offset "."))
(consumer-seq (or offset 0) (seq-fetch channel topic partition opts))))
(consume-seq [this topic partition opts]
(let [[offset] (or (:offset opts)
(fetch-offsets channel topic partition -1 1))
fetch-fn (if (:blocking opts)
(blocking-seq-fetch channel topic partition opts)
(seq-fetch channel topic partition opts))]
(debug (str "Initializing last offset to " offset "."))
(consumer-seq (or offset 0) fetch-fn)))
(close [this]
(close-channel channel)))))


@@ -0,0 +1,22 @@
(ns #^{:doc "Basic Clojure print-dup -> read-string message serialization."}
kafka.print
(:use kafka.types)
(:import (kafka.types Message)))
(extend-type Object
Pack
(pack [this]
(let [^String st (with-out-str (print-dup this *out*))]
(kafka.types.Message. (.getBytes st "UTF-8")))))
(extend-type Message
Unpack
(unpack [this]
(let [^bytes ba (.message this)
msg (String. ba "UTF-8")]
(if (not (empty? msg))
(try
(read-string msg)
(catch Exception e
(println "Invalid expression " msg)))))))


@@ -0,0 +1,22 @@
(ns #^{:doc "Serialization for all Java Serializable objects."}
kafka.serializable
(:use kafka.types)
(:import (kafka.types Message)
(java.io Serializable
ObjectOutputStream ByteArrayOutputStream
ObjectInputStream ByteArrayInputStream)))
(extend-type Serializable
Pack
(pack [this]
(let [bas (ByteArrayOutputStream.)]
(with-open [oos (ObjectOutputStream. bas)]
(.writeObject oos this))
(kafka.types.Message. (.toByteArray bas)))))
(extend-type Message
Unpack
(unpack [this]
(with-open [ois (ObjectInputStream. (ByteArrayInputStream. (.message this)))]
(.readObject ois))))


@@ -0,0 +1,28 @@
(ns #^{:doc "Base kafka-clj types."}
kafka.types)
(deftype #^{:doc "Message type, a wrapper around a byte array."}
Message [^bytes message])
(defprotocol Pack
"Pack protocol converts an object to a Message."
(pack [this] "Convert object to a Message."))
(defprotocol Unpack
"Unpack protocol, reads an object from a Message."
(unpack [^Message this] "Read an object from the message."))
(defprotocol Producer
"Producer protocol."
(produce [this topic partition messages] "Send message[s] for a topic to a partition.")
(close [this] "Closes the producer, socket and channel."))
(defprotocol Consumer
"Consumer protocol."
(consume [this topic partition offset max-size] "Fetch messages. Returns a pair [last-offset, message sequence]")
(offsets [this topic partition time max-offsets] "Query offsets. Returns offsets seq.")
(consume-seq [this topic partition]
[this topic partition opts] "Creates a sequence over the consumer.")
(close [this] "Close the consumer, socket and channel."))


@@ -0,0 +1,46 @@
(ns kafka.buffer-test
(:use (kafka buffer)
clojure.test))
(deftest test-put-get
(with-buffer (buffer 64)
(put (byte 5))
(put (short 10))
(put (int 20))
(put (long 40))
(put "test")
(put (byte-array 3 [(byte 1) (byte 2) (byte 3)]))
(flip)
(is (= (get-byte) (byte 5)))
(is (= (get-short) (short 10)))
(is (= (get-int) (int 20)))
(is (= (get-long) (long 40)))
(is (= (get-string 4) "test"))
(let [ba (get-array 3)]
(is (= (nth ba 0) (byte 1)))
(is (= (nth ba 1) (byte 2)))
(is (= (nth ba 2) (byte 3))))))
(deftest test-with-put
(with-buffer (buffer 64)
(with-put 4 count
(put "test 1"))
(flip)
(is (= (get-int) (int 6)))
(is (= (get-string 6) "test 1"))))
(deftest test-length-encoded
(with-buffer (buffer 64)
(length-encoded short
(put "test 1"))
(length-encoded int
(put "test 2"))
(flip)
(is (= (get-short) (short 6)))
(is (= (get-string 6) "test 1"))
(is (= (get-int) (int 6)))
(is (= (get-string 6) "test 2"))))


@@ -0,0 +1,12 @@
(ns kafka.print-test
(:use (kafka types print)
clojure.test))
(deftest test-pack-unpack
(is (= "test" (unpack (pack "test"))))
(is (= 123 (unpack (pack 123))))
(is (= true (unpack (pack true))))
(is (= [1 2 3] (unpack (pack [1 2 3]))))
(is (= {:a 1} (unpack (pack {:a 1}))))
(is (= '(+ 1 2 3) (unpack (pack '(+ 1 2 3))))))


@@ -0,0 +1,14 @@
(ns kafka.serializable-test
(:use (kafka types serializable)
clojure.test))
(deftest test-pack-unpack
(is (= "test" (unpack (pack "test"))))
(is (= 123 (unpack (pack 123))))
(is (= true (unpack (pack true))))
(is (= [1 2 3] (unpack (pack [1 2 3]))))
(is (= {:a 1} (unpack (pack {:a 1}))))
(is (= '(+ 1 2 3) (unpack (pack '(+ 1 2 3)))))
(let [now (java.util.Date.)]
(is (= now (unpack (pack now))))))

203
clients/cpp/LICENSE Normal file

@@ -0,0 +1,203 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 LinkedIn
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

51
clients/cpp/Makefile.am Normal file
View File

@ -0,0 +1,51 @@
## LibKafkaConnect
## A C++ shared library for connecting to Kafka
#
# Warning: this is the first time I've put together a configure.ac/Makefile.am build.
# Please improve it, as I have no idea what I am doing.
# @benjamg
#
ACLOCAL_AMFLAGS = -I build-aux/m4 ${ACLOCAL_FLAGS}
AM_CPPFLAGS = $(DEPS_CFLAGS)
EXAMPLE_LIBS = -lboost_system -lboost_thread -lkafkaconnect
#
# Shared Library
#
lib_LTLIBRARIES = libkafkaconnect.la
libkafkaconnect_la_SOURCES = src/producer.cpp
libkafkaconnect_la_LDFLAGS = -version-info $(KAFKACONNECT_VERSION)
kafkaconnect_includedir = $(includedir)/kafkaconnect
kafkaconnect_include_HEADERS = src/producer.hpp \
src/encoder.hpp \
src/encoder_helper.hpp
#
# Examples
#
noinst_PROGRAMS = producer
producer_SOURCES = src/example.cpp
producer_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS)
#
# Tests
#
check_PROGRAMS = tests/encoder_helper tests/encoder tests/producer
TESTS = tests/encoder_helper tests/encoder tests/producer
tests_encoder_helper_SOURCES = src/tests/encoder_helper_tests.cpp
tests_encoder_helper_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
tests_encoder_SOURCES = src/tests/encoder_tests.cpp
tests_encoder_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
tests_producer_SOURCES = src/tests/producer_tests.cpp
tests_producer_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
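The three check_PROGRAMS above are Boost unit test binaries run by `make check` through automake's TESTS harness. A minimal sketch of the shape such a test file takes — the module and test names here are hypothetical, and the real assertions live under src/tests/:

```cpp
// Skeleton matching the -lboost_unit_test_framework link line above.
// BOOST_TEST_DYN_LINK is required when linking against the shared
// Boost.Test library instead of compiling in the header-only runner.
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE kafkaconnect_encoder // hypothetical module name
#include <boost/test/unit_test.hpp>

BOOST_AUTO_TEST_CASE(smoke_test)
{
    // Placeholder check; the real cases live in files such as
    // src/tests/encoder_tests.cpp.
    BOOST_CHECK_EQUAL(1 + 1, 2);
}
```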

946
clients/cpp/Makefile.in Normal file
View File

@ -0,0 +1,946 @@
# Makefile.in generated by automake 1.11.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
# 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation,
# Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
#
# Warning: this is the first time I've put together a configure.ac/Makefile.am build.
# Please improve it, as I have no idea what I am doing.
# @benjamg
#
VPATH = @srcdir@
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
noinst_PROGRAMS = producer$(EXEEXT)
check_PROGRAMS = tests/encoder_helper$(EXEEXT) tests/encoder$(EXEEXT) \
tests/producer$(EXEEXT)
TESTS = tests/encoder_helper$(EXEEXT) tests/encoder$(EXEEXT) \
tests/producer$(EXEEXT)
subdir = .
DIST_COMMON = $(am__configure_deps) $(kafkaconnect_include_HEADERS) \
$(srcdir)/Makefile.am $(srcdir)/Makefile.in \
$(top_srcdir)/configure build-aux/config.guess \
build-aux/config.sub build-aux/depcomp build-aux/install-sh \
build-aux/ltmain.sh build-aux/missing
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/build-aux/m4/libtool.m4 \
$(top_srcdir)/build-aux/m4/ltoptions.m4 \
$(top_srcdir)/build-aux/m4/ltsugar.m4 \
$(top_srcdir)/build-aux/m4/ltversion.m4 \
$(top_srcdir)/build-aux/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
configure.lineno config.status.lineno
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__installdirs = "$(DESTDIR)$(libdir)" \
"$(DESTDIR)$(kafkaconnect_includedir)"
LTLIBRARIES = $(lib_LTLIBRARIES)
libkafkaconnect_la_LIBADD =
am_libkafkaconnect_la_OBJECTS = producer.lo
libkafkaconnect_la_OBJECTS = $(am_libkafkaconnect_la_OBJECTS)
libkafkaconnect_la_LINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(libkafkaconnect_la_LDFLAGS) $(LDFLAGS) -o $@
PROGRAMS = $(noinst_PROGRAMS)
am_producer_OBJECTS = example.$(OBJEXT)
producer_OBJECTS = $(am_producer_OBJECTS)
am__DEPENDENCIES_1 =
producer_DEPENDENCIES = $(am__DEPENDENCIES_1)
am_tests_encoder_OBJECTS = encoder_tests.$(OBJEXT)
tests_encoder_OBJECTS = $(am_tests_encoder_OBJECTS)
tests_encoder_DEPENDENCIES = $(am__DEPENDENCIES_1)
am__dirstamp = $(am__leading_dot)dirstamp
am_tests_encoder_helper_OBJECTS = encoder_helper_tests.$(OBJEXT)
tests_encoder_helper_OBJECTS = $(am_tests_encoder_helper_OBJECTS)
tests_encoder_helper_DEPENDENCIES = $(am__DEPENDENCIES_1)
am_tests_producer_OBJECTS = producer_tests.$(OBJEXT)
tests_producer_OBJECTS = $(am_tests_producer_OBJECTS)
tests_producer_DEPENDENCIES = $(am__DEPENDENCIES_1)
DEFAULT_INCLUDES = -I.@am__isrc@
depcomp = $(SHELL) $(top_srcdir)/build-aux/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \
$(LDFLAGS) -o $@
SOURCES = $(libkafkaconnect_la_SOURCES) $(producer_SOURCES) \
$(tests_encoder_SOURCES) $(tests_encoder_helper_SOURCES) \
$(tests_producer_SOURCES)
DIST_SOURCES = $(libkafkaconnect_la_SOURCES) $(producer_SOURCES) \
$(tests_encoder_SOURCES) $(tests_encoder_helper_SOURCES) \
$(tests_producer_SOURCES)
HEADERS = $(kafkaconnect_include_HEADERS)
ETAGS = etags
CTAGS = ctags
am__tty_colors = \
red=; grn=; lgn=; blu=; std=
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
distdir = $(PACKAGE)-$(VERSION)
top_distdir = $(distdir)
am__remove_distdir = \
{ test ! -d "$(distdir)" \
|| { find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
&& rm -fr "$(distdir)"; }; }
DIST_ARCHIVES = $(distdir).tar.gz
GZIP_ENV = --best
distuninstallcheck_listfiles = find . -type f -print
distcleancheck_listfiles = find . -type f -print
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CXX = @CXX@
CXXCPP = @CXXCPP@
CXXDEPMODE = @CXXDEPMODE@
CXXFLAGS = @CXXFLAGS@
CYGPATH_W = @CYGPATH_W@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GREP = @GREP@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
KAFKACONNECT_VERSION = @KAFKACONNECT_VERSION@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
MAKEINFO = @MAKEINFO@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
VERSION = @VERSION@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_CC = @ac_ct_CC@
ac_ct_CXX = @ac_ct_CXX@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
lt_ECHO = @lt_ECHO@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
ACLOCAL_AMFLAGS = -I build-aux/m4 ${ACLOCAL_FLAGS}
AM_CPPFLAGS = $(DEPS_CFLAGS)
EXAMPLE_LIBS = -lboost_system -lboost_thread -lkafkaconnect
#
# Shared Library
#
lib_LTLIBRARIES = libkafkaconnect.la
libkafkaconnect_la_SOURCES = src/producer.cpp
libkafkaconnect_la_LDFLAGS = -version-info $(KAFKACONNECT_VERSION)
kafkaconnect_includedir = $(includedir)/kafkaconnect
kafkaconnect_include_HEADERS = src/producer.hpp \
src/encoder.hpp \
src/encoder_helper.hpp
producer_SOURCES = src/example.cpp
producer_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS)
tests_encoder_helper_SOURCES = src/tests/encoder_helper_tests.cpp
tests_encoder_helper_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
tests_encoder_SOURCES = src/tests/encoder_tests.cpp
tests_encoder_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
tests_producer_SOURCES = src/tests/producer_tests.cpp
tests_producer_LDADD = $(DEPS_LIBS) $(EXAMPLE_LIBS) -lboost_unit_test_framework
all: all-am
.SUFFIXES:
.SUFFIXES: .cpp .lo .o .obj
am--refresh:
@:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
$(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
&& exit 0; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
echo ' $(SHELL) ./config.status'; \
$(SHELL) ./config.status;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
$(SHELL) ./config.status --recheck
$(top_srcdir)/configure: $(am__configure_deps)
$(am__cd) $(srcdir) && $(AUTOCONF)
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
$(am__aclocal_m4_deps):
install-libLTLIBRARIES: $(lib_LTLIBRARIES)
@$(NORMAL_INSTALL)
test -z "$(libdir)" || $(MKDIR_P) "$(DESTDIR)$(libdir)"
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
list2=; for p in $$list; do \
if test -f $$p; then \
list2="$$list2 $$p"; \
else :; fi; \
done; \
test -z "$$list2" || { \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \
}
uninstall-libLTLIBRARIES:
@$(NORMAL_UNINSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
for p in $$list; do \
$(am__strip_dir) \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \
done
clean-libLTLIBRARIES:
-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
test "$$dir" != "$$p" || dir=.; \
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
libkafkaconnect.la: $(libkafkaconnect_la_OBJECTS) $(libkafkaconnect_la_DEPENDENCIES)
$(libkafkaconnect_la_LINK) -rpath $(libdir) $(libkafkaconnect_la_OBJECTS) $(libkafkaconnect_la_LIBADD) $(LIBS)
clean-checkPROGRAMS:
@list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
clean-noinstPROGRAMS:
@list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
producer$(EXEEXT): $(producer_OBJECTS) $(producer_DEPENDENCIES)
@rm -f producer$(EXEEXT)
$(CXXLINK) $(producer_OBJECTS) $(producer_LDADD) $(LIBS)
tests/$(am__dirstamp):
@$(MKDIR_P) tests
@: > tests/$(am__dirstamp)
tests/encoder$(EXEEXT): $(tests_encoder_OBJECTS) $(tests_encoder_DEPENDENCIES) tests/$(am__dirstamp)
@rm -f tests/encoder$(EXEEXT)
$(CXXLINK) $(tests_encoder_OBJECTS) $(tests_encoder_LDADD) $(LIBS)
tests/encoder_helper$(EXEEXT): $(tests_encoder_helper_OBJECTS) $(tests_encoder_helper_DEPENDENCIES) tests/$(am__dirstamp)
@rm -f tests/encoder_helper$(EXEEXT)
$(CXXLINK) $(tests_encoder_helper_OBJECTS) $(tests_encoder_helper_LDADD) $(LIBS)
tests/producer$(EXEEXT): $(tests_producer_OBJECTS) $(tests_producer_DEPENDENCIES) tests/$(am__dirstamp)
@rm -f tests/producer$(EXEEXT)
$(CXXLINK) $(tests_producer_OBJECTS) $(tests_producer_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/encoder_helper_tests.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/encoder_tests.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/example.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/producer.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/producer_tests.Po@am__quote@
.cpp.o:
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $<
.cpp.obj:
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cpp.lo:
@am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $<
producer.lo: src/producer.cpp
@am__fastdepCXX_TRUE@ $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT producer.lo -MD -MP -MF $(DEPDIR)/producer.Tpo -c -o producer.lo `test -f 'src/producer.cpp' || echo '$(srcdir)/'`src/producer.cpp
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/producer.Tpo $(DEPDIR)/producer.Plo
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/producer.cpp' object='producer.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o producer.lo `test -f 'src/producer.cpp' || echo '$(srcdir)/'`src/producer.cpp
example.o: src/example.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT example.o -MD -MP -MF $(DEPDIR)/example.Tpo -c -o example.o `test -f 'src/example.cpp' || echo '$(srcdir)/'`src/example.cpp
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/example.Tpo $(DEPDIR)/example.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/example.cpp' object='example.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o example.o `test -f 'src/example.cpp' || echo '$(srcdir)/'`src/example.cpp
example.obj: src/example.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT example.obj -MD -MP -MF $(DEPDIR)/example.Tpo -c -o example.obj `if test -f 'src/example.cpp'; then $(CYGPATH_W) 'src/example.cpp'; else $(CYGPATH_W) '$(srcdir)/src/example.cpp'; fi`
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/example.Tpo $(DEPDIR)/example.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/example.cpp' object='example.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o example.obj `if test -f 'src/example.cpp'; then $(CYGPATH_W) 'src/example.cpp'; else $(CYGPATH_W) '$(srcdir)/src/example.cpp'; fi`
encoder_tests.o: src/tests/encoder_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT encoder_tests.o -MD -MP -MF $(DEPDIR)/encoder_tests.Tpo -c -o encoder_tests.o `test -f 'src/tests/encoder_tests.cpp' || echo '$(srcdir)/'`src/tests/encoder_tests.cpp
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/encoder_tests.Tpo $(DEPDIR)/encoder_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/encoder_tests.cpp' object='encoder_tests.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o encoder_tests.o `test -f 'src/tests/encoder_tests.cpp' || echo '$(srcdir)/'`src/tests/encoder_tests.cpp
encoder_tests.obj: src/tests/encoder_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT encoder_tests.obj -MD -MP -MF $(DEPDIR)/encoder_tests.Tpo -c -o encoder_tests.obj `if test -f 'src/tests/encoder_tests.cpp'; then $(CYGPATH_W) 'src/tests/encoder_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/encoder_tests.cpp'; fi`
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/encoder_tests.Tpo $(DEPDIR)/encoder_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/encoder_tests.cpp' object='encoder_tests.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o encoder_tests.obj `if test -f 'src/tests/encoder_tests.cpp'; then $(CYGPATH_W) 'src/tests/encoder_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/encoder_tests.cpp'; fi`
encoder_helper_tests.o: src/tests/encoder_helper_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT encoder_helper_tests.o -MD -MP -MF $(DEPDIR)/encoder_helper_tests.Tpo -c -o encoder_helper_tests.o `test -f 'src/tests/encoder_helper_tests.cpp' || echo '$(srcdir)/'`src/tests/encoder_helper_tests.cpp
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/encoder_helper_tests.Tpo $(DEPDIR)/encoder_helper_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/encoder_helper_tests.cpp' object='encoder_helper_tests.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o encoder_helper_tests.o `test -f 'src/tests/encoder_helper_tests.cpp' || echo '$(srcdir)/'`src/tests/encoder_helper_tests.cpp
encoder_helper_tests.obj: src/tests/encoder_helper_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT encoder_helper_tests.obj -MD -MP -MF $(DEPDIR)/encoder_helper_tests.Tpo -c -o encoder_helper_tests.obj `if test -f 'src/tests/encoder_helper_tests.cpp'; then $(CYGPATH_W) 'src/tests/encoder_helper_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/encoder_helper_tests.cpp'; fi`
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/encoder_helper_tests.Tpo $(DEPDIR)/encoder_helper_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/encoder_helper_tests.cpp' object='encoder_helper_tests.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o encoder_helper_tests.obj `if test -f 'src/tests/encoder_helper_tests.cpp'; then $(CYGPATH_W) 'src/tests/encoder_helper_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/encoder_helper_tests.cpp'; fi`
producer_tests.o: src/tests/producer_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT producer_tests.o -MD -MP -MF $(DEPDIR)/producer_tests.Tpo -c -o producer_tests.o `test -f 'src/tests/producer_tests.cpp' || echo '$(srcdir)/'`src/tests/producer_tests.cpp
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/producer_tests.Tpo $(DEPDIR)/producer_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/producer_tests.cpp' object='producer_tests.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o producer_tests.o `test -f 'src/tests/producer_tests.cpp' || echo '$(srcdir)/'`src/tests/producer_tests.cpp
producer_tests.obj: src/tests/producer_tests.cpp
@am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT producer_tests.obj -MD -MP -MF $(DEPDIR)/producer_tests.Tpo -c -o producer_tests.obj `if test -f 'src/tests/producer_tests.cpp'; then $(CYGPATH_W) 'src/tests/producer_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/producer_tests.cpp'; fi`
@am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/producer_tests.Tpo $(DEPDIR)/producer_tests.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='src/tests/producer_tests.cpp' object='producer_tests.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o producer_tests.obj `if test -f 'src/tests/producer_tests.cpp'; then $(CYGPATH_W) 'src/tests/producer_tests.cpp'; else $(CYGPATH_W) '$(srcdir)/src/tests/producer_tests.cpp'; fi`
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
-rm -rf tests/.libs tests/_libs
distclean-libtool:
-rm -f libtool config.lt
install-kafkaconnect_includeHEADERS: $(kafkaconnect_include_HEADERS)
@$(NORMAL_INSTALL)
test -z "$(kafkaconnect_includedir)" || $(MKDIR_P) "$(DESTDIR)$(kafkaconnect_includedir)"
@list='$(kafkaconnect_include_HEADERS)'; test -n "$(kafkaconnect_includedir)" || list=; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(kafkaconnect_includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(kafkaconnect_includedir)" || exit $$?; \
done
uninstall-kafkaconnect_includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(kafkaconnect_include_HEADERS)'; test -n "$(kafkaconnect_includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
test -n "$$files" || exit 0; \
echo " ( cd '$(DESTDIR)$(kafkaconnect_includedir)' && rm -f" $$files ")"; \
cd "$(DESTDIR)$(kafkaconnect_includedir)" && rm -f $$files
ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
mkid -fID $$unique
tags: TAGS
TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
set x; \
here=`pwd`; \
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: CTAGS
CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
check-TESTS: $(TESTS)
@failed=0; all=0; xfail=0; xpass=0; skip=0; \
srcdir=$(srcdir); export srcdir; \
list=' $(TESTS) '; \
$(am__tty_colors); \
if test -n "$$list"; then \
for tst in $$list; do \
if test -f ./$$tst; then dir=./; \
elif test -f $$tst; then dir=; \
else dir="$(srcdir)/"; fi; \
if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \
all=`expr $$all + 1`; \
case " $(XFAIL_TESTS) " in \
*[\ \ ]$$tst[\ \ ]*) \
xpass=`expr $$xpass + 1`; \
failed=`expr $$failed + 1`; \
col=$$red; res=XPASS; \
;; \
*) \
col=$$grn; res=PASS; \
;; \
esac; \
elif test $$? -ne 77; then \
all=`expr $$all + 1`; \
case " $(XFAIL_TESTS) " in \
*[\ \ ]$$tst[\ \ ]*) \
xfail=`expr $$xfail + 1`; \
col=$$lgn; res=XFAIL; \
;; \
*) \
failed=`expr $$failed + 1`; \
col=$$red; res=FAIL; \
;; \
esac; \
else \
skip=`expr $$skip + 1`; \
col=$$blu; res=SKIP; \
fi; \
echo "$${col}$$res$${std}: $$tst"; \
done; \
if test "$$all" -eq 1; then \
tests="test"; \
All=""; \
else \
tests="tests"; \
All="All "; \
fi; \
if test "$$failed" -eq 0; then \
if test "$$xfail" -eq 0; then \
banner="$$All$$all $$tests passed"; \
else \
if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \
banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \
fi; \
else \
if test "$$xpass" -eq 0; then \
banner="$$failed of $$all $$tests failed"; \
else \
if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \
banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \
fi; \
fi; \
dashes="$$banner"; \
skipped=""; \
if test "$$skip" -ne 0; then \
if test "$$skip" -eq 1; then \
skipped="($$skip test was not run)"; \
else \
skipped="($$skip tests were not run)"; \
fi; \
test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \
dashes="$$skipped"; \
fi; \
report=""; \
if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \
report="Please report to $(PACKAGE_BUGREPORT)"; \
test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \
dashes="$$report"; \
fi; \
dashes=`echo "$$dashes" | sed s/./=/g`; \
if test "$$failed" -eq 0; then \
echo "$$grn$$dashes"; \
else \
echo "$$red$$dashes"; \
fi; \
echo "$$banner"; \
test -z "$$skipped" || echo "$$skipped"; \
test -z "$$report" || echo "$$report"; \
echo "$$dashes$$std"; \
test "$$failed" -eq 0; \
else :; fi
distdir: $(DISTFILES)
$(am__remove_distdir)
test -d "$(distdir)" || mkdir "$(distdir)"
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
-test -n "$(am__skip_mode_fix)" \
|| find "$(distdir)" -type d ! -perm -755 \
-exec chmod u+rwx,go+rx {} \; -o \
! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
! -type d ! -perm -400 -exec chmod a+r {} \; -o \
! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
|| chmod -R a+r "$(distdir)"
dist-gzip: distdir
tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
$(am__remove_distdir)
dist-bzip2: distdir
tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2
$(am__remove_distdir)
dist-lzma: distdir
tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma
$(am__remove_distdir)
dist-xz: distdir
tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz
$(am__remove_distdir)
dist-tarZ: distdir
tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
$(am__remove_distdir)
dist-shar: distdir
shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
$(am__remove_distdir)
dist-zip: distdir
-rm -f $(distdir).zip
zip -rq $(distdir).zip $(distdir)
$(am__remove_distdir)
dist dist-all: distdir
tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
$(am__remove_distdir)
# This target untars the dist file and tries a VPATH configuration. Then
# it guarantees that the distribution is self-contained by making another
# tarfile.
distcheck: dist
case '$(DIST_ARCHIVES)' in \
*.tar.gz*) \
GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
*.tar.bz2*) \
bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
*.tar.lzma*) \
lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\
*.tar.xz*) \
xz -dc $(distdir).tar.xz | $(am__untar) ;;\
*.tar.Z*) \
uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
*.shar.gz*) \
GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
*.zip*) \
unzip $(distdir).zip ;;\
esac
chmod -R a-w $(distdir); chmod a+w $(distdir)
mkdir $(distdir)/_build
mkdir $(distdir)/_inst
chmod a-w $(distdir)
test -d $(distdir)/_build || exit 0; \
dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
&& dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
&& am__cwd=`pwd` \
&& $(am__cd) $(distdir)/_build \
&& ../configure --srcdir=.. --prefix="$$dc_install_base" \
$(DISTCHECK_CONFIGURE_FLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) dvi \
&& $(MAKE) $(AM_MAKEFLAGS) check \
&& $(MAKE) $(AM_MAKEFLAGS) install \
&& $(MAKE) $(AM_MAKEFLAGS) installcheck \
&& $(MAKE) $(AM_MAKEFLAGS) uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
distuninstallcheck \
&& chmod -R a-w "$$dc_install_base" \
&& ({ \
(cd ../.. && umask 077 && mkdir "$$dc_destdir") \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
} || { rm -rf "$$dc_destdir"; exit 1; }) \
&& rm -rf "$$dc_destdir" \
&& $(MAKE) $(AM_MAKEFLAGS) dist \
&& rm -rf $(DIST_ARCHIVES) \
&& $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
&& cd "$$am__cwd" \
|| exit 1
$(am__remove_distdir)
@(echo "$(distdir) archives ready for distribution: "; \
list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
distuninstallcheck:
@$(am__cd) '$(distuninstallcheck_dir)' \
&& test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
|| { echo "ERROR: files left after uninstall:" ; \
if test -n "$(DESTDIR)"; then \
echo " (check DESTDIR support)"; \
fi ; \
$(distuninstallcheck_listfiles) ; \
exit 1; } >&2
distcleancheck: distclean
@if test '$(srcdir)' = . ; then \
echo "ERROR: distcleancheck can only run from a VPATH build" ; \
exit 1 ; \
fi
@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left in build directory after distclean:" ; \
$(distcleancheck_listfiles) ; \
exit 1; } >&2
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(kafkaconnect_includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
`test -z '$(STRIP)' || \
echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
-rm -f tests/$(am__dirstamp)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \
clean-libtool clean-noinstPROGRAMS mostlyclean-am
distclean: distclean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-libtool distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-kafkaconnect_includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-libLTLIBRARIES
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf $(top_srcdir)/autom4te.cache
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-kafkaconnect_includeHEADERS \
uninstall-libLTLIBRARIES
.MAKE: check-am install-am install-strip
.PHONY: CTAGS GTAGS all all-am am--refresh check check-TESTS check-am \
clean clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \
clean-libtool clean-noinstPROGRAMS ctags dist dist-all \
dist-bzip2 dist-gzip dist-lzma dist-shar dist-tarZ dist-xz \
dist-zip distcheck distclean distclean-compile \
distclean-generic distclean-libtool distclean-tags \
distcleancheck distdir distuninstallcheck dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am install-info \
install-info-am install-kafkaconnect_includeHEADERS \
install-libLTLIBRARIES install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags uninstall uninstall-am \
uninstall-kafkaconnect_includeHEADERS uninstall-libLTLIBRARIES
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

55
clients/cpp/README.md Normal file
View File

@ -0,0 +1,55 @@
# C++ Kafka library
This library allows you to produce messages to the Kafka distributed publish/subscribe messaging service.
## Requirements
Tested on Ubuntu and Red Hat, both with g++ 4.4 and Boost 1.46.1.
## Installation
Make sure you have g++ and the latest version of Boost:
http://gcc.gnu.org/
http://www.boost.org/
```bash
./configure
```
Run this first; it generates the Makefile for your system.
```bash
make
```
Builds the producer example and the KafkaConnect library.
```bash
make check
```
Builds and runs the unit tests.
```bash
make install
```
Installs the shared library and headers to the default locations (/usr/local/lib and /usr/local/include on Linux).
## Usage
src/example.cpp is a very basic Kafka producer.
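A rough sketch of what driving the producer looks like — the class and method names below are assumptions based on the header layout (src/producer.hpp), not a verbatim copy of example.cpp:

```cpp
// Hypothetical usage sketch; see src/example.cpp and src/producer.hpp
// for the real API. Names marked "assumed" are not confirmed by this
// commit.
#include <string>
#include <vector>

#include <boost/asio.hpp>

#include <kafkaconnect/producer.hpp> // installed by `make install`

int main()
{
    // The examples link -lboost_system and -lboost_thread, which
    // suggests an asio io_service drives the producer's network I/O.
    boost::asio::io_service io_service;

    kafkaconnect::producer producer(io_service); // assumed constructor
    producer.connect("localhost", 9092);         // assumed connect call

    std::vector<std::string> messages;
    messages.push_back("hello kafka");
    producer.send(messages, "test-topic");       // assumed send signature

    io_service.run(); // pump the queued async operations, then return
    return 0;
}
```

Compile your own program against the installed library with the same flags the examples use: `g++ my_producer.cpp -lkafkaconnect -lboost_system -lboost_thread`.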
## API docs
There isn't much code yet. If I get around to writing the other parts of the library I'll document it sensibly;
for now, have a look at the header file: src/producer.hpp
## Contact for questions
Ben Gray, MediaSift Ltd.
http://twitter.com/benjamg

956
clients/cpp/aclocal.m4 vendored Normal file
View File

@ -0,0 +1,956 @@
# generated automatically by aclocal 1.11.1 -*- Autoconf -*-
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
# 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
m4_ifndef([AC_AUTOCONF_VERSION],
[m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.67],,
[m4_warning([this file was generated for autoconf 2.67.
You have another version of autoconf. It may work, but is not guaranteed to.
If you have problems, you may need to regenerate the build system entirely.
To do so, use the procedure documented by the package, typically `autoreconf'.])])
# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_AUTOMAKE_VERSION(VERSION)
# ----------------------------
# Automake X.Y traces this macro to ensure aclocal.m4 has been
# generated from the m4 files accompanying Automake X.Y.
# (This private macro should not be called outside this file.)
AC_DEFUN([AM_AUTOMAKE_VERSION],
[am__api_version='1.11'
dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
dnl require some minimum version. Point them to the right macro.
m4_if([$1], [1.11.1], [],
[AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
])
# _AM_AUTOCONF_VERSION(VERSION)
# -----------------------------
# aclocal traces this macro to find the Autoconf version.
# This is a private macro too. Using m4_define simplifies
# the logic in aclocal, which can simply ignore this definition.
m4_define([_AM_AUTOCONF_VERSION], [])
# AM_SET_CURRENT_AUTOMAKE_VERSION
# -------------------------------
# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced.
# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
[AM_AUTOMAKE_VERSION([1.11.1])dnl
m4_ifndef([AC_AUTOCONF_VERSION],
[m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
# AM_AUX_DIR_EXPAND -*- Autoconf -*-
# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to
# `$srcdir', `$srcdir/..', or `$srcdir/../..'.
#
# Of course, Automake must honor this variable whenever it calls a
# tool from the auxiliary directory. The problem is that $srcdir (and
# therefore $ac_aux_dir as well) can be either absolute or relative,
# depending on how configure is run. This is pretty annoying, since
# it makes $ac_aux_dir quite unusable in subdirectories: in the top
# source directory, any form will work fine, but in subdirectories a
# relative path needs to be adjusted first.
#
# $ac_aux_dir/missing
# fails when called from a subdirectory if $ac_aux_dir is relative
# $top_srcdir/$ac_aux_dir/missing
# fails if $ac_aux_dir is absolute,
# fails when called from a subdirectory in a VPATH build with
# a relative $ac_aux_dir
#
# The reason of the latter failure is that $top_srcdir and $ac_aux_dir
# are both prefixed by $srcdir. In an in-source build this is usually
# harmless because $srcdir is `.', but things will broke when you
# start a VPATH build or use an absolute $srcdir.
#
# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
# iff we strip the leading $srcdir from $ac_aux_dir. That would be:
# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
# and then we would define $MISSING as
# MISSING="\${SHELL} $am_aux_dir/missing"
# This will work as long as MISSING is not called from configure, because
# unfortunately $(top_srcdir) has no meaning in configure.
# However there are other variables, like CC, which are often used in
# configure, and could therefore not use this "fixed" $ac_aux_dir.
#
# Another solution, used here, is to always expand $ac_aux_dir to an
# absolute PATH. The drawback is that using absolute paths prevent a
# configured tree to be moved without reconfiguration.
AC_DEFUN([AM_AUX_DIR_EXPAND],
[dnl Rely on autoconf to set up CDPATH properly.
AC_PREREQ([2.50])dnl
# expand $ac_aux_dir to an absolute path
am_aux_dir=`cd $ac_aux_dir && pwd`
])
# AM_CONDITIONAL -*- Autoconf -*-
# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008
# Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 9
# AM_CONDITIONAL(NAME, SHELL-CONDITION)
# -------------------------------------
# Define a conditional.
AC_DEFUN([AM_CONDITIONAL],
[AC_PREREQ(2.52)dnl
ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])],
[$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
AC_SUBST([$1_TRUE])dnl
AC_SUBST([$1_FALSE])dnl
_AM_SUBST_NOTMAKE([$1_TRUE])dnl
_AM_SUBST_NOTMAKE([$1_FALSE])dnl
m4_define([_AM_COND_VALUE_$1], [$2])dnl
if $2; then
$1_TRUE=
$1_FALSE='#'
else
$1_TRUE='#'
$1_FALSE=
fi
AC_CONFIG_COMMANDS_PRE(
[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
AC_MSG_ERROR([[conditional "$1" was never defined.
Usually this means the macro was only invoked conditionally.]])
fi])])
# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009
# Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 10
# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be
# written in clear, in which case automake, when reading aclocal.m4,
# will think it sees a *use*, and therefore will trigger all it's
# C support machinery. Also note that it means that autoscan, seeing
# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
# _AM_DEPENDENCIES(NAME)
# ----------------------
# See how the compiler implements dependency checking.
# NAME is "CC", "CXX", "GCJ", or "OBJC".
# We try a few techniques and use that to set a single cache variable.
#
# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
# dependency, and given that the user is not expected to run this macro,
# just rely on AC_PROG_CC.
AC_DEFUN([_AM_DEPENDENCIES],
[AC_REQUIRE([AM_SET_DEPDIR])dnl
AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
AC_REQUIRE([AM_MAKE_INCLUDE])dnl
AC_REQUIRE([AM_DEP_TRACK])dnl
ifelse([$1], CC, [depcc="$CC" am_compiler_list=],
[$1], CXX, [depcc="$CXX" am_compiler_list=],
[$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
[$1], UPC, [depcc="$UPC" am_compiler_list=],
[$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'],
[depcc="$$1" am_compiler_list=])
AC_CACHE_CHECK([dependency style of $depcc],
[am_cv_$1_dependencies_compiler_type],
[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
# We make a subdir and do the tests there. Otherwise we can end up
# making bogus files that we don't know about and never remove. For
# instance it was reported that on HP-UX the gcc test will end up
# making a dummy file named `D' -- because `-MD' means `put the output
# in D'.
mkdir conftest.dir
# Copy depcomp to subdir because otherwise we won't find it if we're
# using a relative directory.
cp "$am_depcomp" conftest.dir
cd conftest.dir
# We will build objects and dependencies in a subdirectory because
# it helps to detect inapplicable dependency modes. For instance
# both Tru64's cc and ICC support -MD to output dependencies as a
# side effect of compilation, but ICC will put the dependencies in
# the current directory while Tru64 will put them in the object
# directory.
mkdir sub
am_cv_$1_dependencies_compiler_type=none
if test "$am_compiler_list" = ""; then
am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
fi
am__universal=false
m4_case([$1], [CC],
[case " $depcc " in #(
*\ -arch\ *\ -arch\ *) am__universal=true ;;
esac],
[CXX],
[case " $depcc " in #(
*\ -arch\ *\ -arch\ *) am__universal=true ;;
esac])
for depmode in $am_compiler_list; do
# Setup a source with many dependencies, because some compilers
# like to wrap large dependency lists on column 80 (with \), and
# we should not choose a depcomp mode which is confused by this.
#
# We need to recreate these files for each test, as the compiler may
# overwrite some of them when testing with obscure command lines.
# This happens at least with the AIX C compiler.
: > sub/conftest.c
for i in 1 2 3 4 5 6; do
echo '#include "conftst'$i'.h"' >> sub/conftest.c
# Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
# Solaris 8's {/usr,}/bin/sh.
touch sub/conftst$i.h
done
echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
# We check with `-c' and `-o' for the sake of the "dashmstdout"
# mode. It turns out that the SunPro C++ compiler does not properly
# handle `-M -o', and we need to detect this. Also, some Intel
# versions had trouble with output in subdirs
am__obj=sub/conftest.${OBJEXT-o}
am__minus_obj="-o $am__obj"
case $depmode in
gcc)
# This depmode causes a compiler race in universal mode.
test "$am__universal" = false || continue
;;
nosideeffect)
# after this tag, mechanisms are not by side-effect, so they'll
# only be used when explicitly requested
if test "x$enable_dependency_tracking" = xyes; then
continue
else
break
fi
;;
msvisualcpp | msvcmsys)
# This compiler won't grok `-c -o', but also, the minuso test has
# not run yet. These depmodes are late enough in the game, and
# so weak that their functioning should not be impacted.
am__obj=conftest.${OBJEXT-o}
am__minus_obj=
;;
none) break ;;
esac
if depmode=$depmode \
source=sub/conftest.c object=$am__obj \
depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
$SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
>/dev/null 2>conftest.err &&
grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
${MAKE-make} -s -f confmf > /dev/null 2>&1; then
# icc doesn't choke on unknown options, it will just issue warnings
# or remarks (even with -Werror). So we grep stderr for any message
# that says an option was ignored or not supported.
# When given -MP, icc 7.0 and 7.1 complain thusly:
# icc: Command line warning: ignoring option '-M'; no argument required
# The diagnosis changed in icc 8.0:
# icc: Command line remark: option '-MP' not supported
if (grep 'ignoring option' conftest.err ||
grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
am_cv_$1_dependencies_compiler_type=$depmode
break
fi
fi
done
cd ..
rm -rf conftest.dir
else
am_cv_$1_dependencies_compiler_type=none
fi
])
AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
AM_CONDITIONAL([am__fastdep$1], [
test "x$enable_dependency_tracking" != xno \
&& test "$am_cv_$1_dependencies_compiler_type" = gcc3])
])
# AM_SET_DEPDIR
# -------------
# Choose a directory name for dependency files.
# This macro is AC_REQUIREd in _AM_DEPENDENCIES
AC_DEFUN([AM_SET_DEPDIR],
[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
])
# AM_DEP_TRACK
# ------------
AC_DEFUN([AM_DEP_TRACK],
[AC_ARG_ENABLE(dependency-tracking,
[ --disable-dependency-tracking speeds up one-time build
--enable-dependency-tracking do not reject slow dependency extractors])
if test "x$enable_dependency_tracking" != xno; then
am_depcomp="$ac_aux_dir/depcomp"
AMDEPBACKSLASH='\'
fi
AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
AC_SUBST([AMDEPBACKSLASH])dnl
_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
])
# Generate code to set up dependency tracking. -*- Autoconf -*-
# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008
# Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
#serial 5
# _AM_OUTPUT_DEPENDENCY_COMMANDS
# ------------------------------
AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
[{
# Autoconf 2.62 quotes --file arguments for eval, but not when files
# are listed without --file. Let's play safe and only enable the eval
# if we detect the quoting.
case $CONFIG_FILES in
*\'*) eval set x "$CONFIG_FILES" ;;
*) set x $CONFIG_FILES ;;
esac
shift
for mf
do
# Strip MF so we end up with the name of the file.
mf=`echo "$mf" | sed -e 's/:.*$//'`
# Check whether this is an Automake generated Makefile or not.
# We used to match only the files named `Makefile.in', but
# some people rename them; so instead we look at the file content.
# Grep'ing the first line is not enough: some people post-process
# each Makefile.in and add a new line on top of each file to say so.
# Grep'ing the whole file is not good either: AIX grep has a line
# limit of 2048, but all sed's we know have understand at least 4000.
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
dirpart=`AS_DIRNAME("$mf")`
else
continue
fi
# Extract the definition of DEPDIR, am__include, and am__quote
# from the Makefile without running `make'.
DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
test -z "$DEPDIR" && continue
am__include=`sed -n 's/^am__include = //p' < "$mf"`
test -z "am__include" && continue
am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
# When using ansi2knr, U may be empty or an underscore; expand it
U=`sed -n 's/^U = //p' < "$mf"`
# Find all dependency output files, they are included files with
# $(DEPDIR) in their names. We invoke sed twice because it is the
# simplest approach to changing $(DEPDIR) to its actual value in the
# expansion.
for file in `sed -n "
s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
# Make sure the directory exists.
test -f "$dirpart/$file" && continue
fdir=`AS_DIRNAME(["$file"])`
AS_MKDIR_P([$dirpart/$fdir])
# echo "creating $dirpart/$file"
echo '# dummy' > "$dirpart/$file"
done
done
}
])# _AM_OUTPUT_DEPENDENCY_COMMANDS
# AM_OUTPUT_DEPENDENCY_COMMANDS
# -----------------------------
# This macro should only be invoked once -- use via AC_REQUIRE.
#
# This code is only required when automatic dependency tracking
# is enabled. FIXME. This creates each `.P' file that we will
# need in order to bootstrap the dependency handling code.
AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
[AC_CONFIG_COMMANDS([depfiles],
[test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
[AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
])
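# Illustrative effect (file names hypothetical): after config.status runs
# the `depfiles' command registered above, every dependency file that an
# Automake Makefile includes exists as a stub, so the first `make' does
# not fail on a line like
#   include ./.deps/foo.Po
# where .deps/foo.Po initially holds only the `# dummy' line written by
# _AM_OUTPUT_DEPENDENCY_COMMANDS.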
# Do all the work for Automake. -*- Autoconf -*-
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
# 2005, 2006, 2008, 2009 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 16
# This macro actually does too much. Some checks are only needed if
# your package does certain things. But this isn't really a big deal.
# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
# AM_INIT_AUTOMAKE([OPTIONS])
# -----------------------------------------------
# The call with PACKAGE and VERSION arguments is the old style
# call (pre autoconf-2.50), which is being phased out. PACKAGE
# and VERSION should now be passed to AC_INIT and removed from
# the call to AM_INIT_AUTOMAKE.
# We support both call styles for the transition. After
# the next Automake release, Autoconf can make the AC_INIT
# arguments mandatory, and then we can depend on a new Autoconf
# release and drop the old call support.
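# A minimal configure.ac sketch of the two call styles (package name and
# version are hypothetical):
#   old style:   AC_INIT
#                AM_INIT_AUTOMAKE([mypkg], [1.0])
#   new style:   AC_INIT([mypkg], [1.0])
#                AM_INIT_AUTOMAKE([foreign])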
AC_DEFUN([AM_INIT_AUTOMAKE],
[AC_PREREQ([2.62])dnl
dnl Autoconf wants to disallow AM_ names. We explicitly allow
dnl the ones we care about.
m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
AC_REQUIRE([AC_PROG_INSTALL])dnl
if test "`cd $srcdir && pwd`" != "`pwd`"; then
# Use -I$(srcdir) only when $(srcdir) != ., so that make's output
# is not polluted with repeated "-I."
AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
# test to see if srcdir already configured
if test -f $srcdir/config.status; then
AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
fi
fi
# test whether we have cygpath
if test -z "$CYGPATH_W"; then
if (cygpath --version) >/dev/null 2>/dev/null; then
CYGPATH_W='cygpath -w'
else
CYGPATH_W=echo
fi
fi
AC_SUBST([CYGPATH_W])
# Define the identity of the package.
dnl Distinguish between old-style and new-style calls.
m4_ifval([$2],
[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
AC_SUBST([PACKAGE], [$1])dnl
AC_SUBST([VERSION], [$2])],
[_AM_SET_OPTIONS([$1])dnl
dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,,
[m4_fatal([AC_INIT should be called with package and version arguments])])dnl
AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
_AM_IF_OPTION([no-define],,
[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package])
AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl
# Some tools Automake needs.
AC_REQUIRE([AM_SANITY_CHECK])dnl
AC_REQUIRE([AC_ARG_PROGRAM])dnl
AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version})
AM_MISSING_PROG(AUTOCONF, autoconf)
AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version})
AM_MISSING_PROG(AUTOHEADER, autoheader)
AM_MISSING_PROG(MAKEINFO, makeinfo)
AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
AC_REQUIRE([AM_PROG_MKDIR_P])dnl
# We need awk for the "check" target. The system "awk" is bad on
# some platforms.
AC_REQUIRE([AC_PROG_AWK])dnl
AC_REQUIRE([AC_PROG_MAKE_SET])dnl
AC_REQUIRE([AM_SET_LEADING_DOT])dnl
_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
[_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
[_AM_PROG_TAR([v7])])])
_AM_IF_OPTION([no-dependencies],,
[AC_PROVIDE_IFELSE([AC_PROG_CC],
[_AM_DEPENDENCIES(CC)],
[define([AC_PROG_CC],
defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl
AC_PROVIDE_IFELSE([AC_PROG_CXX],
[_AM_DEPENDENCIES(CXX)],
[define([AC_PROG_CXX],
defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl
AC_PROVIDE_IFELSE([AC_PROG_OBJC],
[_AM_DEPENDENCIES(OBJC)],
[define([AC_PROG_OBJC],
defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl
])
_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl
dnl The `parallel-tests' driver may need to know about EXEEXT, so add the
dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro
dnl is hooked onto _AC_COMPILER_EXEEXT early, see below.
AC_CONFIG_COMMANDS_PRE(dnl
[m4_provide_if([_AM_COMPILER_EXEEXT],
[AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
])
dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not
dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
dnl mangled by Autoconf and run in a shell conditional statement.
m4_define([_AC_COMPILER_EXEEXT],
m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])
# When config.status generates a header, we must update the stamp-h file.
# This file resides in the same directory as the config header
# that is generated. The stamp files are numbered to have different names.
# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
# loop where config.status creates the headers, so we can generate
# our stamp files there.
AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
[# Compute $1's index in $config_headers.
_am_arg=$1
_am_stamp_count=1
for _am_header in $config_headers :; do
case $_am_header in
$_am_arg | $_am_arg:* )
break ;;
* )
_am_stamp_count=`expr $_am_stamp_count + 1` ;;
esac
done
echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])
# Copyright (C) 2001, 2003, 2005, 2008 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_PROG_INSTALL_SH
# ------------------
# Define $install_sh.
AC_DEFUN([AM_PROG_INSTALL_SH],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
if test x"${install_sh}" != xset; then
case $am_aux_dir in
*\ * | *\	*)
install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
*)
install_sh="\${SHELL} $am_aux_dir/install-sh"
esac
fi
AC_SUBST(install_sh)])
# Copyright (C) 2003, 2005 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 2
# Check whether the underlying file-system supports filenames
# with a leading dot. For instance MS-DOS doesn't.
AC_DEFUN([AM_SET_LEADING_DOT],
[rm -rf .tst 2>/dev/null
mkdir .tst 2>/dev/null
if test -d .tst; then
am__leading_dot=.
else
am__leading_dot=_
fi
rmdir .tst 2>/dev/null
AC_SUBST([am__leading_dot])])
# Check to see how 'make' treats includes. -*- Autoconf -*-
# Copyright (C) 2001, 2002, 2003, 2005, 2009 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 4
# AM_MAKE_INCLUDE()
# -----------------
# Check to see how make treats includes.
AC_DEFUN([AM_MAKE_INCLUDE],
[am_make=${MAKE-make}
cat > confinc << 'END'
am__doit:
@echo this is the am__doit target
.PHONY: am__doit
END
# If we don't find an include directive, just comment out the code.
AC_MSG_CHECKING([for style of include used by $am_make])
am__include="#"
am__quote=
_am_result=none
# First try GNU make style include.
echo "include confinc" > confmf
# Ignore all kinds of additional output from `make'.
case `$am_make -s -f confmf 2> /dev/null` in #(
*the\ am__doit\ target*)
am__include=include
am__quote=
_am_result=GNU
;;
esac
# Now try BSD make style include.
if test "$am__include" = "#"; then
echo '.include "confinc"' > confmf
case `$am_make -s -f confmf 2> /dev/null` in #(
*the\ am__doit\ target*)
am__include=.include
am__quote="\""
_am_result=BSD
;;
esac
fi
AC_SUBST([am__include])
AC_SUBST([am__quote])
AC_MSG_RESULT([$_am_result])
rm -f confinc confmf
])
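# Illustrative result (fragment hypothetical): automake emits dependency
# includes using these substitutions, e.g.
#   @am__include@ @am__quote@./$(DEPDIR)/foo.Po@am__quote@
# which config.status rewrites to `include ./.deps/foo.Po' under GNU make
# and `.include "./.deps/foo.Po"' under BSD make.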
# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*-
# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008
# Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 6
# AM_MISSING_PROG(NAME, PROGRAM)
# ------------------------------
AC_DEFUN([AM_MISSING_PROG],
[AC_REQUIRE([AM_MISSING_HAS_RUN])
$1=${$1-"${am_missing_run}$2"}
AC_SUBST($1)])
# AM_MISSING_HAS_RUN
# ------------------
# Define MISSING if not defined so far and test if it supports --run.
# If it does, set am_missing_run to use it, otherwise, to nothing.
AC_DEFUN([AM_MISSING_HAS_RUN],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
AC_REQUIRE_AUX_FILE([missing])dnl
if test x"${MISSING+set}" != xset; then
case $am_aux_dir in
*\ * | *\	*)
MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
*)
MISSING="\${SHELL} $am_aux_dir/missing" ;;
esac
fi
# Use eval to expand $SHELL
if eval "$MISSING --run true"; then
am_missing_run="$MISSING --run "
else
am_missing_run=
AC_MSG_WARN([`missing' script is too old or missing])
fi
])
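# For example, the AM_MISSING_PROG(AUTOCONF, autoconf) call earlier in this
# file typically leaves AUTOCONF='${SHELL} /path/to/missing --run autoconf'
# (path hypothetical), so an absent maintainer tool degrades to a warning
# at make time rather than a hard failure.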
# Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_PROG_MKDIR_P
# ---------------
# Check for `mkdir -p'.
AC_DEFUN([AM_PROG_MKDIR_P],
[AC_PREREQ([2.60])dnl
AC_REQUIRE([AC_PROG_MKDIR_P])dnl
dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P,
dnl while keeping a definition of mkdir_p for backward compatibility.
dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile.
dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of
dnl Makefile.ins that do not define MKDIR_P, so we do our own
dnl adjustment using top_builddir (which is defined more often than
dnl MKDIR_P).
AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl
case $mkdir_p in
[[\\/$]]* | ?:[[\\/]]*) ;;
*/*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
esac
])
# Helper functions for option handling. -*- Autoconf -*-
# Copyright (C) 2001, 2002, 2003, 2005, 2008 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 4
# _AM_MANGLE_OPTION(NAME)
# -----------------------
AC_DEFUN([_AM_MANGLE_OPTION],
[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
# _AM_SET_OPTION(NAME)
# ------------------------------
# Set option NAME. Presently that only means defining a flag for this option.
AC_DEFUN([_AM_SET_OPTION],
[m4_define(_AM_MANGLE_OPTION([$1]), 1)])
# _AM_SET_OPTIONS(OPTIONS)
# ----------------------------------
# OPTIONS is a space-separated list of Automake options.
AC_DEFUN([_AM_SET_OPTIONS],
[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
# -------------------------------------------
# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
AC_DEFUN([_AM_IF_OPTION],
[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
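# A minimal sketch of how these helpers combine (the option names shown are
# standard Automake options):
#   _AM_SET_OPTIONS([foreign no-dependencies])
#   _AM_IF_OPTION([no-dependencies], [skip dep tracking], [set it up])
# _AM_MANGLE_OPTION([no-dependencies]) expands to
# _AM_OPTION_no_dependencies, so each option is simply an m4 flag macro.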
# Check to make sure that the build environment is sane. -*- Autoconf -*-
# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008
# Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 5
# AM_SANITY_CHECK
# ---------------
AC_DEFUN([AM_SANITY_CHECK],
[AC_MSG_CHECKING([whether build environment is sane])
# Just in case
sleep 1
echo timestamp > conftest.file
# Reject unsafe characters in $srcdir or the absolute working directory
# name. Accept space and tab only in the latter.
am_lf='
'
case `pwd` in
*[[\\\"\#\$\&\'\`$am_lf]]*)
AC_MSG_ERROR([unsafe absolute working directory name]);;
esac
case $srcdir in
*[[\\\"\#\$\&\'\`$am_lf\ \ ]]*)
AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);;
esac
# Do `set' in a subshell so we don't clobber the current shell's
# arguments. Must try -L first in case configure is actually a
# symlink; some systems play weird games with the mod time of symlinks
# (eg FreeBSD returns the mod time of the symlink's containing
# directory).
if (
set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
if test "$[*]" = "X"; then
# -L didn't work.
set X `ls -t "$srcdir/configure" conftest.file`
fi
rm -f conftest.file
if test "$[*]" != "X $srcdir/configure conftest.file" \
&& test "$[*]" != "X conftest.file $srcdir/configure"; then
# If neither matched, then we have a broken ls. This can happen
# if, for instance, CONFIG_SHELL is bash and it inherits a
# broken ls alias from the environment. This has actually
# happened. Such a system could not be considered "sane".
AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken
alias in your environment])
fi
test "$[2]" = conftest.file
)
then
# Ok.
:
else
AC_MSG_ERROR([newly created file is older than distributed files!
Check your system clock])
fi
AC_MSG_RESULT(yes)])
# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_PROG_INSTALL_STRIP
# ---------------------
# One issue with vendor `install' (even GNU) is that you can't
# specify the program used to strip binaries. This is especially
# annoying in cross-compiling environments, where the build's strip
# is unlikely to handle the host's binaries.
# Fortunately install-sh will honor a STRIPPROG variable, so we
# always use install-sh in `make install-strip', and initialize
# STRIPPROG with the value of the STRIP variable (set by the user).
AC_DEFUN([AM_PROG_INSTALL_STRIP],
[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
# Installed binaries are usually stripped using `strip' when the user
# runs `make install-strip'. However `strip' might not be the right
# tool to use in cross-compilation environments, therefore Automake
# will honor the `STRIP' environment variable to overrule this program.
dnl Don't test for $cross_compiling = yes, because it might be `maybe'.
if test "$cross_compiling" != no; then
AC_CHECK_TOOL([STRIP], [strip], :)
fi
INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
AC_SUBST([INSTALL_STRIP_PROGRAM])])
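# A hedged usage sketch (host triplet and tool name hypothetical):
#   ./configure --host=arm-linux
#   make install-strip STRIP=arm-linux-strip
# installs binaries through $(INSTALL_STRIP_PROGRAM), stripping with the
# cross strip instead of the build machine's.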
# Copyright (C) 2006, 2008 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 2
# _AM_SUBST_NOTMAKE(VARIABLE)
# ---------------------------
# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
# This macro is traced by Automake.
AC_DEFUN([_AM_SUBST_NOTMAKE])
# AM_SUBST_NOTMAKE(VARIABLE)
# ---------------------------
# Public sister of _AM_SUBST_NOTMAKE.
AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])
# Check how to create a tarball. -*- Autoconf -*-
# Copyright (C) 2004, 2005 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# serial 2
# _AM_PROG_TAR(FORMAT)
# --------------------
# Check how to create a tarball in format FORMAT.
# FORMAT should be one of `v7', `ustar', or `pax'.
#
# Substitute a variable $(am__tar) that is a command
# writing to stdout a FORMAT-tarball containing the directory
# $tardir.
# tardir=directory && $(am__tar) > result.tar
#
# Substitute a variable $(am__untar) that extracts such
# a tarball read from stdin.
# $(am__untar) < result.tar
AC_DEFUN([_AM_PROG_TAR],
[# Always define AMTAR for backward compatibility.
AM_MISSING_PROG([AMTAR], [tar])
m4_if([$1], [v7],
[am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'],
[m4_case([$1], [ustar],, [pax],,
[m4_fatal([Unknown tar format])])
AC_MSG_CHECKING([how to create a $1 tar archive])
# Loop over all known methods to create a tar archive until one works.
_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
_am_tools=${am_cv_prog_tar_$1-$_am_tools}
# Do not fold the above two lines into one, because Tru64 sh and
# Solaris sh will not grok spaces in the rhs of `-'.
for _am_tool in $_am_tools
do
case $_am_tool in
gnutar)
for _am_tar in tar gnutar gtar;
do
AM_RUN_LOG([$_am_tar --version]) && break
done
am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
am__untar="$_am_tar -xf -"
;;
plaintar)
# Must skip GNU tar: if it does not support --format= it doesn't create
# ustar tarball either.
(tar --version) >/dev/null 2>&1 && continue
am__tar='tar chf - "$$tardir"'
am__tar_='tar chf - "$tardir"'
am__untar='tar xf -'
;;
pax)
am__tar='pax -L -x $1 -w "$$tardir"'
am__tar_='pax -L -x $1 -w "$tardir"'
am__untar='pax -r'
;;
cpio)
am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
am__untar='cpio -i -H $1 -d'
;;
none)
am__tar=false
am__tar_=false
am__untar=false
;;
esac
# If the value was cached, stop now. We just wanted to have am__tar
# and am__untar set.
test -n "${am_cv_prog_tar_$1}" && break
# tar/untar a dummy directory, and stop if the command works
rm -rf conftest.dir
mkdir conftest.dir
echo GrepMe > conftest.dir/file
AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
rm -rf conftest.dir
if test -s conftest.tar; then
AM_RUN_LOG([$am__untar <conftest.tar])
grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
fi
done
rm -rf conftest.dir
AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
AC_MSG_RESULT([$am_cv_prog_tar_$1])])
AC_SUBST([am__tar])
AC_SUBST([am__untar])
]) # _AM_PROG_TAR
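# Illustrative use of the substituted variables (archive name hypothetical),
# following the documented `tardir=directory && $(am__tar) > result.tar'
# pattern from the comments above:
#   tardir=mypkg-1.0 && $(am__tar) | gzip -c > mypkg-1.0.tar.gz
#   $(am__untar) < result.tar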
m4_include([build-aux/m4/libtool.m4])
m4_include([build-aux/m4/ltoptions.m4])
m4_include([build-aux/m4/ltsugar.m4])
m4_include([build-aux/m4/ltversion.m4])
m4_include([build-aux/m4/lt~obsolete.m4])

1502
clients/cpp/build-aux/config.guess vendored Executable file

File diff suppressed because it is too large

1714
clients/cpp/build-aux/config.sub vendored Executable file

File diff suppressed because it is too large

630
clients/cpp/build-aux/depcomp Executable file
View File

@ -0,0 +1,630 @@
#! /bin/sh
# depcomp - compile a program generating dependencies as side-effects
scriptversion=2009-04-28.21; # UTC
# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2009 Free
# Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
case $1 in
'')
echo "$0: No command. Try \`$0 --help' for more information." 1>&2
exit 1;
;;
-h | --h*)
cat <<\EOF
Usage: depcomp [--help] [--version] PROGRAM [ARGS]
Run PROGRAM ARGS to compile a file, generating dependencies
as side-effects.
Environment variables:
depmode Dependency tracking mode.
source Source file read by `PROGRAM ARGS'.
object Object file output by `PROGRAM ARGS'.
DEPDIR Directory where dependencies are stored.
depfile Dependency file to output.
tmpdepfile Temporary file to use when outputting dependencies.
libtool Whether libtool is used (yes/no).
Report bugs to <bug-automake@gnu.org>.
EOF
exit $?
;;
-v | --v*)
echo "depcomp $scriptversion"
exit $?
;;
esac
if test -z "$depmode" || test -z "$source" || test -z "$object"; then
echo "depcomp: Variables source, object and depmode must be set" 1>&2
exit 1
fi
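# Illustrative invocation (file names hypothetical); automake-generated
# rules drive this script roughly as:
#   source='foo.c' object='foo.o' libtool=no DEPDIR=.deps depmode=gcc3 \
#   ./depcomp gcc -c -o foo.o foo.c
# which compiles foo.c and leaves its dependencies in .deps/foo.Po.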
# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
depfile=${depfile-`echo "$object" |
sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
rm -f "$tmpdepfile"
# Some modes work just like other modes, but use different flags. We
# parameterize here, but still list the modes in the big case below,
# to make depend.m4 easier to write. Note that we *cannot* use a case
# here, because this file can only contain one case statement.
if test "$depmode" = hp; then
# HP compiler uses -M and no extra arg.
gccflag=-M
depmode=gcc
fi
if test "$depmode" = dashXmstdout; then
# This is just like dashmstdout with a different argument.
dashmflag=-xM
depmode=dashmstdout
fi
cygpath_u="cygpath -u -f -"
if test "$depmode" = msvcmsys; then
# This is just like msvisualcpp but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u="sed s,\\\\\\\\,/,g"
depmode=msvisualcpp
fi
case "$depmode" in
gcc3)
## gcc 3 implements dependency tracking that does exactly what
## we want. Yay! Note: for some reason libtool 1.4 doesn't like
## it if -MD -MP comes after the -MF stuff. Hmm.
## Unfortunately, FreeBSD c89 acceptance of flags depends upon
## the command line argument order; so add the flags where they
## appear in depend2.am. Note that the slowdown incurred here
## affects only configure: in makefiles, %FASTDEP% shortcuts this.
for arg
do
case $arg in
-c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
*) set fnord "$@" "$arg" ;;
esac
shift # fnord
shift # $arg
done
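## Illustrative effect of the loop above (file names hypothetical): a call
## such as `gcc -c foo.c' is rewritten to
##   gcc -MT foo.o -MD -MP -MF .deps/foo.TPo -c foo.c
## so a single compiler run both compiles and writes the dependency file.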
"$@"
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile"
exit $stat
fi
mv "$tmpdepfile" "$depfile"
;;
gcc)
## There are various ways to get dependency output from gcc. Here's
## why we pick this rather obscure method:
## - Don't want to use -MD because we'd like the dependencies to end
## up in a subdir. Having to rename by hand is ugly.
## (We might end up doing this anyway to support other compilers.)
## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
## -MM, not -M (despite what the docs say).
## - Using -M directly means running the compiler twice (even worse
## than renaming).
if test -z "$gccflag"; then
gccflag=-MD,
fi
"$@" -Wp,"$gccflag$tmpdepfile"
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
## The second -e expression handles DOS-style file names with drive letters.
sed -e 's/^[^:]*: / /' \
-e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
## This next piece of magic avoids the `deleted header file' problem.
## The problem is that when a header file which appears in a .P file
## is deleted, the dependency causes make to die (because there is
## typically no way to rebuild the header). We avoid this by adding
## dummy dependencies for each header file. Too bad gcc doesn't do
## this for us directly.
tr ' ' '
' < "$tmpdepfile" |
## Some versions of gcc put a space before the `:'. On the theory
## that the space means something, we add a space to the output as
## well.
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
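## The resulting depfile then looks roughly like (names hypothetical):
##   foo.o : \
##    foo.c foo.h
##   foo.c :
##   foo.h :
## so a deleted header no longer makes `make' die.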
rm -f "$tmpdepfile"
;;
hp)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
sgi)
if test "$libtool" = yes; then
"$@" "-Wp,-MDupdate,$tmpdepfile"
else
"$@" -MDupdate "$tmpdepfile"
fi
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
echo "$object : \\" > "$depfile"
# Clip off the initial element (the dependent). Don't try to be
# clever and replace this with sed code, as IRIX sed won't handle
# lines with more than a fixed number of characters (4096 in
# IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
# the IRIX cc adds comments like `#:fec' to the end of the
# dependency line.
tr ' ' '
' < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \
tr '
' ' ' >> "$depfile"
echo >> "$depfile"
# The second pass generates a dummy entry for each header file.
tr ' ' '
' < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
>> "$depfile"
else
# The sourcefile does not contain any dependencies, so just
# store a dummy comment line, to avoid errors with the Makefile
# "include basename.Plo" scheme.
echo "#dummy" > "$depfile"
fi
rm -f "$tmpdepfile"
;;
aix)
# The C for AIX Compiler uses -M and outputs the dependencies
# in a .u file. In older versions, this file always lives in the
# current directory. Also, the AIX compiler puts `$object:' at the
# start of each line; $object doesn't have directory information.
# Version 6 uses the directory in both cases.
dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
test "x$dir" = "x$object" && dir=
base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.u
tmpdepfile2=$base.u
tmpdepfile3=$dir.libs/$base.u
"$@" -Wc,-M
else
tmpdepfile1=$dir$base.u
tmpdepfile2=$dir$base.u
tmpdepfile3=$dir$base.u
"$@" -M
fi
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
# Each line is of the form `foo.o: dependent.h'.
# Do two passes, one to just change these to
# `$object: dependent.h' and one to simply `dependent.h:'.
sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
# That's a tab and a space in the [].
sed -e 's,^.*\.[a-z]*:[	 ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
else
# The sourcefile does not contain any dependencies, so just
# store a dummy comment line, to avoid errors with the Makefile
# "include basename.Plo" scheme.
echo "#dummy" > "$depfile"
fi
rm -f "$tmpdepfile"
;;
icc)
# Intel's C compiler understands `-MD -MF file'. However on
# icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c
# ICC 7.0 will fill foo.d with something like
# foo.o: sub/foo.c
# foo.o: sub/foo.h
# which is wrong. We want:
# sub/foo.o: sub/foo.c
# sub/foo.o: sub/foo.h
# sub/foo.c:
# sub/foo.h:
# ICC 7.1 will output
# foo.o: sub/foo.c sub/foo.h
# and will wrap long lines using \ :
# foo.o: sub/foo.c ... \
# sub/foo.h ... \
# ...
"$@" -MD -MF "$tmpdepfile"
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each line is of the form `foo.o: dependent.h',
# or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
# Do two passes, one to just change these to
# `$object: dependent.h' and one to simply `dependent.h:'.
sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" |
sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp2)
# The "hp" stanza above does not work with aCC (C++) and HP's ia64
# compilers, which have integrated preprocessors. The correct option
# to use with these is +Maked; it writes dependencies to a file named
# 'foo.d', which lands next to the object file, wherever that
# happens to be.
# Much of this is similar to the tru64 case; see comments there.
dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
test "x$dir" = "x$object" && dir=
base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir.libs/$base.d
"$@" -Wc,+Maked
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
"$@" +Maked
fi
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile1" "$tmpdepfile2"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile"
# Add `dependent.h:' lines.
sed -ne '2,${
s/^ *//
s/ \\*$//
s/$/:/
p
}' "$tmpdepfile" >> "$depfile"
else
echo "#dummy" > "$depfile"
fi
rm -f "$tmpdepfile" "$tmpdepfile2"
;;
tru64)
# The Tru64 compiler uses -MD to generate dependencies as a side
# effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'.
# At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
# dependencies in `foo.d' instead, so we check for that too.
# Subdirectories are respected.
dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
test "x$dir" = "x$object" && dir=
base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
if test "$libtool" = yes; then
# With Tru64 cc, shared objects can also be used to make a
# static library. This mechanism is used in libtool 1.4 series to
# handle both shared and static libraries in a single compilation.
# With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d.
#
# With libtool 1.5 this exception was removed, and libtool now
# generates 2 separate objects for the 2 libraries. These two
# compilations output dependencies in $dir.libs/$base.o.d and
# in $dir$base.o.d. We have to check for both files, because
# one of the two compilations can be disabled. We should prefer
# $dir$base.o.d over $dir.libs/$base.o.d because the latter is
# automatically cleaned when .libs/ is deleted, while ignoring
# the former would cause a distcleancheck panic.
tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4
tmpdepfile2=$dir$base.o.d # libtool 1.5
tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5
tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504
"$@" -Wc,-MD
else
tmpdepfile1=$dir$base.o.d
tmpdepfile2=$dir$base.d
tmpdepfile3=$dir$base.d
tmpdepfile4=$dir$base.d
"$@" -MD
fi
stat=$?
if test $stat -eq 0; then :
else
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
# That's a tab and a space in the [].
sed -e 's,^.*\.[a-z]*:[	 ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
else
echo "#dummy" > "$depfile"
fi
rm -f "$tmpdepfile"
;;
#nosideeffect)
# This comment above is used by automake to tell side-effect
# dependency tracking mechanisms from slower ones.
dashmstdout)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout, regardless of -o.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove `-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
test -z "$dashmflag" && dashmflag=-M
# Require at least two characters before searching for `:'
# in the target name. This is to cope with DOS-style filenames:
# a dependency such as `c:/foo/bar' could be seen as target `c' otherwise.
"$@" $dashmflag |
sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile"
rm -f "$depfile"
cat < "$tmpdepfile" > "$depfile"
tr ' ' '
' < "$tmpdepfile" | \
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
dashXmstdout)
# This case only exists to satisfy depend.m4. It is never actually
# run, as this mode is specially recognized in the preamble.
exit 1
;;
makedepend)
"$@" || exit $?
# Remove any Libtool call
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# X makedepend
shift
cleared=no eat=no
for arg
do
case $cleared in
no)
set ""; shift
cleared=yes ;;
esac
if test $eat = yes; then
eat=no
continue
fi
case "$arg" in
-D*|-I*)
set fnord "$@" "$arg"; shift ;;
# Strip any option that makedepend may not understand. Remove
# the object too, otherwise makedepend will parse it as a source file.
-arch)
eat=yes ;;
-*|$object)
;;
*)
set fnord "$@" "$arg"; shift ;;
esac
done
obj_suffix=`echo "$object" | sed 's/^.*\././'`
touch "$tmpdepfile"
${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
rm -f "$depfile"
cat < "$tmpdepfile" > "$depfile"
sed '1,2d' "$tmpdepfile" | tr ' ' '
' | \
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile" "$tmpdepfile".bak
;;
cpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove `-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
"$@" -E |
sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
-e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' |
sed '$ s: \\$::' > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
cat < "$tmpdepfile" >> "$depfile"
sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvisualcpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
IFS=" "
for arg
do
case "$arg" in
-o)
shift
;;
$object)
shift
;;
"-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
set fnord "$@"
shift
shift
;;
*)
set fnord "$@" "$arg"
shift
shift
;;
esac
done
"$@" -E 2>/dev/null |
sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile"
echo " " >> "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvcmsys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
none)
exec "$@"
;;
*)
echo "Unknown depmode $depmode" 1>&2
exit 1
;;
esac
exit 0
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:

520
clients/cpp/build-aux/install-sh Executable file
View File

@ -0,0 +1,520 @@
#!/bin/sh
# install - install a program, script, or datafile
scriptversion=2009-04-28.21; # UTC
# This originates from X11R5 (mit/util/scripts/install.sh), which was
# later released in X11R6 (xc/config/util/install.sh) with the
# following copyright and license.
#
# Copyright (C) 1994 X Consortium
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name of the X Consortium shall not
# be used in advertising or otherwise to promote the sale, use or other deal-
# ings in this Software without prior written authorization from the X Consor-
# tium.
#
#
# FSF changes to this file are in the public domain.
#
# Calling this script install-sh is preferred over install.sh, to prevent
# `make' implicit rules from creating a file called install from it
# when there is no Makefile.
#
# This script is compatible with the BSD install script, but was written
# from scratch.
nl='
'
IFS=" "" $nl"
# set DOITPROG to echo to test this script
# Don't use :- since 4.3BSD and earlier shells don't like it.
doit=${DOITPROG-}
if test -z "$doit"; then
doit_exec=exec
else
doit_exec=$doit
fi
# Put in absolute file names if you don't have them in your path;
# or use environment vars.
chgrpprog=${CHGRPPROG-chgrp}
chmodprog=${CHMODPROG-chmod}
chownprog=${CHOWNPROG-chown}
cmpprog=${CMPPROG-cmp}
cpprog=${CPPROG-cp}
mkdirprog=${MKDIRPROG-mkdir}
mvprog=${MVPROG-mv}
rmprog=${RMPROG-rm}
stripprog=${STRIPPROG-strip}
posix_glob='?'
initialize_posix_glob='
test "$posix_glob" != "?" || {
if (set -f) 2>/dev/null; then
posix_glob=
else
posix_glob=:
fi
}
'
posix_mkdir=
# Desired mode of installed file.
mode=0755
chgrpcmd=
chmodcmd=$chmodprog
chowncmd=
mvcmd=$mvprog
rmcmd="$rmprog -f"
stripcmd=
src=
dst=
dir_arg=
dst_arg=
copy_on_change=false
no_target_directory=
usage="\
Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
or: $0 [OPTION]... SRCFILES... DIRECTORY
or: $0 [OPTION]... -t DIRECTORY SRCFILES...
or: $0 [OPTION]... -d DIRECTORIES...
In the 1st form, copy SRCFILE to DSTFILE.
In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
In the 4th, create DIRECTORIES.
Options:
--help display this help and exit.
--version display version info and exit.
-c (ignored)
-C install only if different (preserve the last data modification time)
-d create directories instead of installing files.
-g GROUP $chgrpprog installed files to GROUP.
-m MODE $chmodprog installed files to MODE.
-o USER $chownprog installed files to USER.
-s $stripprog installed files.
-t DIRECTORY install into DIRECTORY.
-T report an error if DSTFILE is a directory.
Environment variables override the default commands:
CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
RMPROG STRIPPROG
"
while test $# -ne 0; do
case $1 in
-c) ;;
-C) copy_on_change=true;;
-d) dir_arg=true;;
-g) chgrpcmd="$chgrpprog $2"
shift;;
--help) echo "$usage"; exit $?;;
-m) mode=$2
case $mode in
*' '* | *'	'* | *'
'* | *'*'* | *'?'* | *'['*)
echo "$0: invalid mode: $mode" >&2
exit 1;;
esac
shift;;
-o) chowncmd="$chownprog $2"
shift;;
-s) stripcmd=$stripprog;;
-t) dst_arg=$2
shift;;
-T) no_target_directory=true;;
--version) echo "$0 $scriptversion"; exit $?;;
--) shift
break;;
-*) echo "$0: invalid option: $1" >&2
exit 1;;
*) break;;
esac
shift
done
if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
# When -d is used, all remaining arguments are directories to create.
# When -t is used, the destination is already specified.
# Otherwise, the last argument is the destination. Remove it from $@.
for arg
do
if test -n "$dst_arg"; then
# $@ is not empty: it contains at least $arg.
set fnord "$@" "$dst_arg"
shift # fnord
fi
shift # arg
dst_arg=$arg
done
fi
if test $# -eq 0; then
if test -z "$dir_arg"; then
echo "$0: no input file specified." >&2
exit 1
fi
# It's OK to call `install-sh -d' without argument.
# This can happen when creating conditional directories.
exit 0
fi
if test -z "$dir_arg"; then
trap '(exit $?); exit' 1 2 13 15
# Set umask so as not to create temps with too-generous modes.
# However, 'strip' requires both read and write access to temps.
case $mode in
# Optimize common cases.
*644) cp_umask=133;;
*755) cp_umask=22;;
*[0-7])
if test -z "$stripcmd"; then
u_plus_rw=
else
u_plus_rw='% 200'
fi
cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
*)
if test -z "$stripcmd"; then
u_plus_rw=
else
u_plus_rw=,u+rw
fi
cp_umask=$mode$u_plus_rw;;
esac
fi
for src
do
# Protect names starting with `-'.
case $src in
-*) src=./$src;;
esac
if test -n "$dir_arg"; then
dst=$src
dstdir=$dst
test -d "$dstdir"
dstdir_status=$?
else
# Waiting for this to be detected by the "$cpprog $src $dsttmp" command
# might cause directories to be created, which would be especially bad
# if $src (and thus $dsttmp) contains '*'.
if test ! -f "$src" && test ! -d "$src"; then
echo "$0: $src does not exist." >&2
exit 1
fi
if test -z "$dst_arg"; then
echo "$0: no destination specified." >&2
exit 1
fi
dst=$dst_arg
# Protect names starting with `-'.
case $dst in
-*) dst=./$dst;;
esac
# If destination is a directory, append the input filename; won't work
# if double slashes aren't ignored.
if test -d "$dst"; then
if test -n "$no_target_directory"; then
echo "$0: $dst_arg: Is a directory" >&2
exit 1
fi
dstdir=$dst
dst=$dstdir/`basename "$src"`
dstdir_status=0
else
# Prefer dirname, but fall back on a substitute if dirname fails.
dstdir=`
(dirname "$dst") 2>/dev/null ||
expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$dst" : 'X\(//\)[^/]' \| \
X"$dst" : 'X\(//\)$' \| \
X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
echo X"$dst" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'
`
test -d "$dstdir"
dstdir_status=$?
fi
fi
obsolete_mkdir_used=false
if test $dstdir_status != 0; then
case $posix_mkdir in
'')
# Create intermediate dirs using mode 755 as modified by the umask.
# This is like FreeBSD 'install' as of 1997-10-28.
umask=`umask`
case $stripcmd.$umask in
# Optimize common cases.
*[2367][2367]) mkdir_umask=$umask;;
.*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
*[0-7])
mkdir_umask=`expr $umask + 22 \
- $umask % 100 % 40 + $umask % 20 \
- $umask % 10 % 4 + $umask % 2
`;;
*) mkdir_umask=$umask,go-w;;
esac
# With -d, create the new directory with the user-specified mode.
# Otherwise, rely on $mkdir_umask.
if test -n "$dir_arg"; then
mkdir_mode=-m$mode
else
mkdir_mode=
fi
posix_mkdir=false
case $umask in
*[123567][0-7][0-7])
# POSIX mkdir -p sets u+wx bits regardless of umask, which
# is incompatible with FreeBSD 'install' when (umask & 300) != 0.
;;
*)
tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
if (umask $mkdir_umask &&
exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
then
if test -z "$dir_arg" || {
# Check for POSIX incompatibilities with -m.
# HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
# other-writeable bit of parent directory when it shouldn't.
# FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
ls_ld_tmpdir=`ls -ld "$tmpdir"`
case $ls_ld_tmpdir in
d????-?r-*) different_mode=700;;
d????-?--*) different_mode=755;;
*) false;;
esac &&
$mkdirprog -m$different_mode -p -- "$tmpdir" && {
ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
}
}
then posix_mkdir=:
fi
rmdir "$tmpdir/d" "$tmpdir"
else
# Remove any dirs left behind by ancient mkdir implementations.
rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
fi
trap '' 0;;
esac;;
esac
if
$posix_mkdir && (
umask $mkdir_umask &&
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
)
then :
else
# The umask is ridiculous, or mkdir does not conform to POSIX,
# or it failed possibly due to a race condition. Create the
# directory the slow way, step by step, checking for races as we go.
case $dstdir in
/*) prefix='/';;
-*) prefix='./';;
*) prefix='';;
esac
eval "$initialize_posix_glob"
oIFS=$IFS
IFS=/
$posix_glob set -f
set fnord $dstdir
shift
$posix_glob set +f
IFS=$oIFS
prefixes=
for d
do
test -z "$d" && continue
prefix=$prefix$d
if test -d "$prefix"; then
prefixes=
else
if $posix_mkdir; then
(umask=$mkdir_umask &&
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
# Don't fail if two instances are running concurrently.
test -d "$prefix" || exit 1
else
case $prefix in
*\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
*) qprefix=$prefix;;
esac
prefixes="$prefixes '$qprefix'"
fi
fi
prefix=$prefix/
done
if test -n "$prefixes"; then
# Don't fail if two instances are running concurrently.
(umask $mkdir_umask &&
eval "\$doit_exec \$mkdirprog $prefixes") ||
test -d "$dstdir" || exit 1
obsolete_mkdir_used=true
fi
fi
fi
if test -n "$dir_arg"; then
{ test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
{ test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
else
# Make a couple of temp file names in the proper directory.
dsttmp=$dstdir/_inst.$$_
rmtmp=$dstdir/_rm.$$_
# Trap to clean up those temp files at exit.
trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
# Copy the file name to the temp name.
(umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
# and set any options; do chmod last to preserve setuid bits.
#
# If any of these fail, we abort the whole thing. If we want to
# ignore errors from any of these, just make sure not to ignore
# errors from the above "$doit $cpprog $src $dsttmp" command.
#
{ test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
{ test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
{ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
# If -C, don't bother to copy if it wouldn't change the file.
if $copy_on_change &&
old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
eval "$initialize_posix_glob" &&
$posix_glob set -f &&
set X $old && old=:$2:$4:$5:$6 &&
set X $new && new=:$2:$4:$5:$6 &&
$posix_glob set +f &&
test "$old" = "$new" &&
$cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
then
rm -f "$dsttmp"
else
# Rename the file to the real destination.
$doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
# The rename failed, perhaps because mv can't rename something else
# to itself, or perhaps because mv is so ancient that it does not
# support -f.
{
# Now remove or move aside any old file at destination location.
# We try this two ways since rm can't unlink itself on some
# systems and the destination file might be busy for other
# reasons. In this case, the final cleanup might fail but the new
# file should still install successfully.
{
test ! -f "$dst" ||
$doit $rmcmd -f "$dst" 2>/dev/null ||
{ $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
{ $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
} ||
{ echo "$0: cannot unlink or rename $dst" >&2
(exit 1); exit 1
}
} &&
# Now rename the file to the real destination.
$doit $mvcmd "$dsttmp" "$dst"
}
fi || exit 1
trap '' 0
fi
done
# Local variables:
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:

8413
clients/cpp/build-aux/ltmain.sh Executable file

File diff suppressed because it is too large

7377
clients/cpp/build-aux/m4/libtool.m4 vendored Normal file

File diff suppressed because it is too large

368
clients/cpp/build-aux/m4/ltoptions.m4 vendored Normal file
View File

@ -0,0 +1,368 @@
# Helper functions for option handling. -*- Autoconf -*-
#
# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
# Written by Gary V. Vaughan, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# serial 6 ltoptions.m4
# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
# ------------------------------------------
m4_define([_LT_MANGLE_OPTION],
[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
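# For example, _LT_MANGLE_OPTION([LT_INIT], [win32-dll]) expands to
# _LT_OPTION_LT_INIT__win32_dll (non-alphanumerics rewritten to `_').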
# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
# ---------------------------------------
# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
# matching handler defined, dispatch to it. Other OPTION-NAMEs are
# saved as a flag.
m4_define([_LT_SET_OPTION],
[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
_LT_MANGLE_DEFUN([$1], [$2]),
[m4_warning([Unknown $1 option `$2'])])[]dnl
])
# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
# ------------------------------------------------------------
# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
m4_define([_LT_IF_OPTION],
[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
# -------------------------------------------------------
# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
# are set.
m4_define([_LT_UNLESS_OPTIONS],
[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
[m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
[m4_define([$0_found])])])[]dnl
m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
])[]dnl
])
# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
# ----------------------------------------
# OPTION-LIST is a space-separated list of Libtool options associated
# with MACRO-NAME. If any OPTION has a matching handler declared with
# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
# the unknown option and exit.
m4_defun([_LT_SET_OPTIONS],
[# Set options
m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
[_LT_SET_OPTION([$1], _LT_Option)])
m4_if([$1],[LT_INIT],[
dnl
dnl Simply set some default values (i.e off) if boolean options were not
dnl specified:
_LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
])
_LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
])
dnl
dnl If no reference was made to various pairs of opposing options, then
dnl we run the default mode handler for the pair. For example, if neither
dnl `shared' nor `disable-shared' was passed, we enable building of shared
dnl archives by default:
_LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
_LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
_LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
_LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
[_LT_ENABLE_FAST_INSTALL])
])
])# _LT_SET_OPTIONS
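dnl A minimal sketch of the entry point this serves (the options shown are
dnl the standard ones handled below): a configure.ac line such as
dnl   LT_INIT([dlopen win32-dll])
dnl routes each name through _LT_SET_OPTION to its LT_OPTION_DEFINE handler.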
## --------------------------------- ##
## Macros to handle LT_INIT options. ##
## --------------------------------- ##
# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
# -----------------------------------------
m4_define([_LT_MANGLE_DEFUN],
[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
# -----------------------------------------------
m4_define([LT_OPTION_DEFINE],
[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
])# LT_OPTION_DEFINE
# dlopen
# ------
LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
])
AU_DEFUN([AC_LIBTOOL_DLOPEN],
[_LT_SET_OPTION([LT_INIT], [dlopen])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `dlopen' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
# win32-dll
# ---------
# Declare package support for building win32 dll's.
LT_OPTION_DEFINE([LT_INIT], [win32-dll],
[enable_win32_dll=yes
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-cegcc*)
AC_CHECK_TOOL(AS, as, false)
AC_CHECK_TOOL(DLLTOOL, dlltool, false)
AC_CHECK_TOOL(OBJDUMP, objdump, false)
;;
esac
test -z "$AS" && AS=as
_LT_DECL([], [AS], [0], [Assembler program])dnl
test -z "$DLLTOOL" && DLLTOOL=dlltool
_LT_DECL([], [DLLTOOL], [0], [DLL creation program])dnl
test -z "$OBJDUMP" && OBJDUMP=objdump
_LT_DECL([], [OBJDUMP], [0], [Object dumper program])dnl
])# win32-dll
AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
_LT_SET_OPTION([LT_INIT], [win32-dll])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `win32-dll' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
# _LT_ENABLE_SHARED([DEFAULT])
# ----------------------------
# implement the --enable-shared flag, and support the `shared' and
# `disable-shared' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_SHARED],
[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([shared],
[AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_shared=yes ;;
no) enable_shared=no ;;
*)
enable_shared=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_shared=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
_LT_DECL([build_libtool_libs], [enable_shared], [0],
[Whether or not to build shared libraries])
])# _LT_ENABLE_SHARED
LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
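# E.g. a package that should default to static-only would call (sketch):
#   LT_INIT([disable-shared])
# A user can still pass --enable-shared=PKGS at configure time; the loop
# above matches each listed package name against ${PACKAGE}.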
# Old names:
AC_DEFUN([AC_ENABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
])
AC_DEFUN([AC_DISABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], [disable-shared])
])
AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_SHARED], [])
dnl AC_DEFUN([AM_DISABLE_SHARED], [])
# _LT_ENABLE_STATIC([DEFAULT])
# ----------------------------
# implement the --enable-static flag, and support the `static' and
# `disable-static' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_STATIC],
[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([static],
[AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_static=yes ;;
no) enable_static=no ;;
*)
enable_static=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_static=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_static=]_LT_ENABLE_STATIC_DEFAULT)
_LT_DECL([build_old_libs], [enable_static], [0],
[Whether or not to build static libraries])
])# _LT_ENABLE_STATIC
LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
# Old names:
AC_DEFUN([AC_ENABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
])
AC_DEFUN([AC_DISABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], [disable-static])
])
AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_STATIC], [])
dnl AC_DEFUN([AM_DISABLE_STATIC], [])
# _LT_ENABLE_FAST_INSTALL([DEFAULT])
# ----------------------------------
# implement the --enable-fast-install flag, and support the `fast-install'
# and `disable-fast-install' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_FAST_INSTALL],
[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([fast-install],
[AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
[optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_fast_install=yes ;;
no) enable_fast_install=no ;;
*)
enable_fast_install=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_fast_install=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
_LT_DECL([fast_install], [enable_fast_install], [0],
[Whether or not to optimize for fast installation])dnl
])# _LT_ENABLE_FAST_INSTALL
LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
# Old names:
AU_DEFUN([AC_ENABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the `fast-install' option into LT_INIT's first parameter.])
])
AU_DEFUN([AC_DISABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the `disable-fast-install' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
# _LT_WITH_PIC([MODE])
# --------------------
# implement the --with-pic flag, and support the `pic-only' and `no-pic'
# LT_INIT options.
# MODE is either `yes' or `no'. If omitted, it defaults to `both'.
m4_define([_LT_WITH_PIC],
[AC_ARG_WITH([pic],
[AS_HELP_STRING([--with-pic],
[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
[pic_mode="$withval"],
[pic_mode=default])
test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
])# _LT_WITH_PIC
LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
# Old name:
AU_DEFUN([AC_LIBTOOL_PICMODE],
[_LT_SET_OPTION([LT_INIT], [pic-only])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `pic-only' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
## ----------------- ##
## LTDL_INIT Options ##
## ----------------- ##
m4_define([_LTDL_MODE], [])
LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
[m4_define([_LTDL_MODE], [nonrecursive])])
LT_OPTION_DEFINE([LTDL_INIT], [recursive],
[m4_define([_LTDL_MODE], [recursive])])
LT_OPTION_DEFINE([LTDL_INIT], [subproject],
[m4_define([_LTDL_MODE], [subproject])])
m4_define([_LTDL_TYPE], [])
LT_OPTION_DEFINE([LTDL_INIT], [installable],
[m4_define([_LTDL_TYPE], [installable])])
LT_OPTION_DEFINE([LTDL_INIT], [convenience],
[m4_define([_LTDL_TYPE], [convenience])])

123
clients/cpp/build-aux/m4/ltsugar.m4 vendored Normal file
View File

@ -0,0 +1,123 @@
# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*-
#
# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
# Written by Gary V. Vaughan, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# serial 6 ltsugar.m4
# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
# lt_join(SEP, ARG1, [ARG2...])
# -----------------------------
# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
# associated separator.
# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
# versions in m4sugar had bugs.
m4_define([lt_join],
[m4_if([$#], [1], [],
[$#], [2], [[$2]],
[m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
m4_define([_lt_join],
[m4_if([$#$2], [2], [],
[m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
# lt_car(LIST)
# lt_cdr(LIST)
# ------------
# Manipulate m4 lists.
# These macros are necessary as long as we still need to support
# Autoconf-2.59, which quotes differently.
m4_define([lt_car], [[$1]])
m4_define([lt_cdr],
[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
[$#], 1, [],
[m4_dquote(m4_shift($@))])])
m4_define([lt_unquote], $1)
# lt_append(MACRO-NAME, STRING, [SEPARATOR])
# ------------------------------------------
# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
# Note that neither SEPARATOR nor STRING are expanded; they are appended
# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
# No SEPARATOR is output if MACRO-NAME was previously undefined (different
# than defined and empty).
#
# This macro is needed until we can rely on Autoconf 2.62, since earlier
# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
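# For example (an illustrative sketch added here; `demo' is a hypothetical
# macro name):
#   lt_append([demo], [one])        => demo expands to `one'
#   lt_append([demo], [two], [, ])  => demo expands to `one, two'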
m4_define([lt_append],
[m4_define([$1],
m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
# ----------------------------------------------------------
# Produce a SEP delimited list of all paired combinations of elements of
# PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list
# has the form PREFIXmINFIXSUFFIXn.
# Needed until we can rely on m4_combine added in Autoconf 2.62.
m4_define([lt_combine],
[m4_if(m4_eval([$# > 3]), [1],
[m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
[[m4_foreach([_Lt_prefix], [$2],
[m4_foreach([_Lt_suffix],
]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
[_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
# -----------------------------------------------------------------------
# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
m4_define([lt_if_append_uniq],
[m4_ifdef([$1],
[m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
[lt_append([$1], [$2], [$3])$4],
[$5])],
[lt_append([$1], [$2], [$3])$4])])
# lt_dict_add(DICT, KEY, VALUE)
# -----------------------------
m4_define([lt_dict_add],
[m4_define([$1($2)], [$3])])
# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
# --------------------------------------------
m4_define([lt_dict_add_subkey],
[m4_define([$1($2:$3)], [$4])])
# lt_dict_fetch(DICT, KEY, [SUBKEY])
# ----------------------------------
m4_define([lt_dict_fetch],
[m4_ifval([$3],
m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
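# For example (an illustrative sketch added here; `colors' is a hypothetical
# dictionary name):
#   lt_dict_add([colors], [sky], [blue])
#   lt_dict_fetch([colors], [sky])   => blue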
# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
# -----------------------------------------------------------------
m4_define([lt_if_dict_fetch],
[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
[$5],
[$6])])
# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
# --------------------------------------------------------------
m4_define([lt_dict_filter],
[m4_if([$5], [], [],
[lt_join(m4_quote(m4_default([$4], [[, ]])),
lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
[lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
])

23
clients/cpp/build-aux/m4/ltversion.m4 vendored Normal file
View File

@ -0,0 +1,23 @@
# ltversion.m4 -- version numbers -*- Autoconf -*-
#
# Copyright (C) 2004 Free Software Foundation, Inc.
# Written by Scott James Remnant, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# Generated from ltversion.in.
# serial 3017 ltversion.m4
# This file is part of GNU Libtool
m4_define([LT_PACKAGE_VERSION], [2.2.6b])
m4_define([LT_PACKAGE_REVISION], [1.3017])
AC_DEFUN([LTVERSION_VERSION],
[macro_version='2.2.6b'
macro_revision='1.3017'
_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
_LT_DECL(, macro_revision, 0)
])

92
clients/cpp/build-aux/m4/lt~obsolete.m4 vendored Normal file
View File

@ -0,0 +1,92 @@
# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
#
# Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
# Written by Scott James Remnant, 2004.
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# serial 4 lt~obsolete.m4
# These exist entirely to fool aclocal when bootstrapping libtool.
#
# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
# which have later been changed to m4_define as they aren't part of the
# exported API, or moved to Autoconf or Automake where they belong.
#
# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
# using a macro with the same name in our local m4/libtool.m4 it'll
# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
# and doesn't know about Autoconf macros at all.)
#
# So we provide this file, which has a silly filename so it's always
# included after everything else. This provides aclocal with the
# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
# because those macros already exist, or will be overwritten later.
# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
#
# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
# Yes, that means every name once taken will need to remain here until
# we give up compatibility with versions before 1.7, at which point
# we need to keep only those names which we still refer to.
# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
m4_ifndef([AC_LIBTOOL_RC], [AC_DEFUN([AC_LIBTOOL_RC])])
m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])

376
clients/cpp/build-aux/missing Executable file
View File

@ -0,0 +1,376 @@
#! /bin/sh
# Common stub for a few missing GNU programs while installing.
scriptversion=2009-04-28.21; # UTC
# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006,
# 2008, 2009 Free Software Foundation, Inc.
# Originally by François Pinard <pinard@iro.umontreal.ca>, 1996.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
if test $# -eq 0; then
echo 1>&2 "Try \`$0 --help' for more information"
exit 1
fi
run=:
sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p'
sed_minuso='s/.* -o \([^ ]*\).*/\1/p'
# In the cases where this matters, `missing' is being run in the
# srcdir already.
if test -f configure.ac; then
configure_ac=configure.ac
else
configure_ac=configure.in
fi
msg="missing on your system"
case $1 in
--run)
# Try to run requested program, and just exit if it succeeds.
run=
shift
"$@" && exit 0
# Exit code 63 means version mismatch. This often happens
# when the user tries to use an ancient version of a tool on
# a file that requires a minimum version. In this case we
# should proceed as if the program had been absent, or
# if --run hadn't been passed.
if test $? = 63; then
run=:
msg="probably too old"
fi
;;
-h|--h|--he|--hel|--help)
echo "\
$0 [OPTION]... PROGRAM [ARGUMENT]...
Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an
error status if there is no known handling for PROGRAM.
Options:
-h, --help display this help and exit
-v, --version output version information and exit
--run try to run the given command, and emulate it if it fails
Supported PROGRAM values:
aclocal touch file \`aclocal.m4'
autoconf touch file \`configure'
autoheader touch file \`config.h.in'
autom4te touch the output file, or create a stub one
automake touch all \`Makefile.in' files
bison create \`y.tab.[ch]', if possible, from existing .[ch]
flex create \`lex.yy.c', if possible, from existing .c
help2man touch the output file
lex create \`lex.yy.c', if possible, from existing .c
makeinfo touch the output file
tar try tar, gnutar, gtar, then tar without non-portable flags
yacc create \`y.tab.[ch]', if possible, from existing .[ch]
Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and
\`g' are ignored when checking the name.
Send bug reports to <bug-automake@gnu.org>."
exit $?
;;
-v|--v|--ve|--ver|--vers|--versi|--versio|--version)
echo "missing $scriptversion (GNU Automake)"
exit $?
;;
-*)
echo 1>&2 "$0: Unknown \`$1' option"
echo 1>&2 "Try \`$0 --help' for more information"
exit 1
;;
esac
# normalize program name to check for.
program=`echo "$1" | sed '
s/^gnu-//; t
s/^gnu//; t
s/^g//; t'`
# Now exit if we have it, but it failed. Also exit now if we
# don't have it and --version was passed (most likely to detect
# the program). This is about non-GNU programs, so use $1 not
# $program.
case $1 in
lex*|yacc*)
# Not GNU programs, they don't have --version.
;;
tar*)
if test -n "$run"; then
echo 1>&2 "ERROR: \`tar' requires --run"
exit 1
elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
exit 1
fi
;;
*)
if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
# We have it, but it failed.
exit 1
elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
# Could not run --version or --help. This is probably someone
# running `$TOOL --version' or `$TOOL --help' to check whether
# $TOOL exists and not knowing $TOOL uses missing.
exit 1
fi
;;
esac
# If it does not exist, or fails to run (possibly an outdated version),
# try to emulate it.
case $program in
aclocal*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified \`acinclude.m4' or \`${configure_ac}'. You might want
to install the \`Automake' and \`Perl' packages. Grab them from
any GNU archive site."
touch aclocal.m4
;;
autoconf*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified \`${configure_ac}'. You might want to install the
\`Autoconf' and \`GNU m4' packages. Grab them from any GNU
archive site."
touch configure
;;
autoheader*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified \`acconfig.h' or \`${configure_ac}'. You might want
to install the \`Autoconf' and \`GNU m4' packages. Grab them
from any GNU archive site."
files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}`
test -z "$files" && files="config.h"
touch_files=
for f in $files; do
case $f in
*:*) touch_files="$touch_files "`echo "$f" |
sed -e 's/^[^:]*://' -e 's/:.*//'`;;
*) touch_files="$touch_files $f.in";;
esac
done
touch $touch_files
;;
automake*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'.
You might want to install the \`Automake' and \`Perl' packages.
Grab them from any GNU archive site."
find . -type f -name Makefile.am -print |
sed 's/\.am$/.in/' |
while read f; do touch "$f"; done
;;
autom4te*)
echo 1>&2 "\
WARNING: \`$1' is needed, but is $msg.
You might have modified some files without having the
proper tools for further handling them.
You can get \`$1' as part of \`Autoconf' from any GNU
archive site."
file=`echo "$*" | sed -n "$sed_output"`
test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
if test -f "$file"; then
touch $file
else
test -z "$file" || exec >$file
echo "#! /bin/sh"
echo "# Created by GNU Automake missing as a replacement of"
echo "# $ $@"
echo "exit 0"
chmod +x $file
exit 1
fi
;;
bison*|yacc*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified a \`.y' file. You may need the \`Bison' package
in order for those modifications to take effect. You can get
\`Bison' from any GNU archive site."
rm -f y.tab.c y.tab.h
if test $# -ne 1; then
eval LASTARG="\${$#}"
case $LASTARG in
*.y)
SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'`
if test -f "$SRCFILE"; then
cp "$SRCFILE" y.tab.c
fi
SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'`
if test -f "$SRCFILE"; then
cp "$SRCFILE" y.tab.h
fi
;;
esac
fi
if test ! -f y.tab.h; then
echo >y.tab.h
fi
if test ! -f y.tab.c; then
echo 'main() { return 0; }' >y.tab.c
fi
;;
lex*|flex*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified a \`.l' file. You may need the \`Flex' package
in order for those modifications to take effect. You can get
\`Flex' from any GNU archive site."
rm -f lex.yy.c
if test $# -ne 1; then
eval LASTARG="\${$#}"
case $LASTARG in
*.l)
SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'`
if test -f "$SRCFILE"; then
cp "$SRCFILE" lex.yy.c
fi
;;
esac
fi
if test ! -f lex.yy.c; then
echo 'main() { return 0; }' >lex.yy.c
fi
;;
help2man*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified a dependency of a manual page. You may need the
\`Help2man' package in order for those modifications to take
effect. You can get \`Help2man' from any GNU archive site."
file=`echo "$*" | sed -n "$sed_output"`
test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
if test -f "$file"; then
touch $file
else
test -z "$file" || exec >$file
echo ".ab help2man is required to generate this page"
exit $?
fi
;;
makeinfo*)
echo 1>&2 "\
WARNING: \`$1' is $msg. You should only need it if
you modified a \`.texi' or \`.texinfo' file, or any other file
indirectly affecting the aspect of the manual. The spurious
call might also be the consequence of using a buggy \`make' (AIX,
DU, IRIX). You might want to install the \`Texinfo' package or
the \`GNU make' package. Grab either from any GNU archive site."
# The file to touch is that specified with -o ...
file=`echo "$*" | sed -n "$sed_output"`
test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
if test -z "$file"; then
# ... or it is the one specified with @setfilename ...
infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'`
file=`sed -n '
/^@setfilename/{
s/.* \([^ ]*\) *$/\1/
p
q
}' $infile`
# ... or it is derived from the source name (dir/f.texi becomes f.info)
test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info
fi
# If the file does not exist, the user really needs makeinfo;
# let's fail without touching anything.
test -f $file || exit 1
touch $file
;;
tar*)
shift
# We have already tried tar in the generic part.
# Look for gnutar/gtar before invocation to avoid ugly error
# messages.
if (gnutar --version > /dev/null 2>&1); then
gnutar "$@" && exit 0
fi
if (gtar --version > /dev/null 2>&1); then
gtar "$@" && exit 0
fi
firstarg="$1"
if shift; then
case $firstarg in
*o*)
firstarg=`echo "$firstarg" | sed s/o//`
tar "$firstarg" "$@" && exit 0
;;
esac
case $firstarg in
*h*)
firstarg=`echo "$firstarg" | sed s/h//`
tar "$firstarg" "$@" && exit 0
;;
esac
fi
echo 1>&2 "\
WARNING: I can't seem to run \`tar' with the given arguments.
You may want to install GNU tar or Free paxutils, or check the
command line arguments."
exit 1
;;
*)
echo 1>&2 "\
WARNING: \`$1' is needed, and is $msg.
You might have modified some files without having the
proper tools for further handling them. Check the \`README' file,
it often tells you about the needed prerequisites for installing
this package. You may also peek at any GNU archive site, in case
some other package would contain this missing \`$1' program."
exit 1
;;
esac
exit 0
# Local variables:
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:

16820
clients/cpp/configure vendored Executable file

File diff suppressed because it is too large

28
clients/cpp/configure.ac Normal file
View File

@ -0,0 +1,28 @@
## LibKafkaConnect
## A C++ shared library for connecting to Kafka
#
# Warning: this is the first time I've made a configure.ac/Makefile.am setup.
# Please improve it, as I have no idea what I am doing.
# @benjamg
#
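# A typical bootstrap from a fresh checkout might be (illustrative only;
# assumes GNU autoconf, automake and libtool are installed):
#   autoreconf --install && ./configure && make
#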
AC_INIT([LibKafkaConnect], [0.1])
AC_PREREQ([2.59])
AC_CONFIG_AUX_DIR([build-aux])
AM_INIT_AUTOMAKE([foreign -Wall])
AC_PROG_LIBTOOL
AC_PROG_CXX
AC_PROG_CPP
AC_CONFIG_MACRO_DIR([build-aux/m4])
#
# Version number
#
AC_SUBST([KAFKACONNECT_VERSION], [1:0:1])
AC_CONFIG_FILES([Makefile])
AC_OUTPUT

View File

@ -0,0 +1,49 @@
/*
* encoder.hpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#ifndef KAFKA_ENCODER_HPP_
#define KAFKA_ENCODER_HPP_
#include <boost/foreach.hpp>
#include "encoder_helper.hpp"
namespace kafkaconnect {
template <typename List>
void encode(std::ostream& stream, const std::string& topic, const uint32_t partition, const List& messages)
{
// Pre-calculate size of message set
uint32_t messageset_size = 0;
BOOST_FOREACH(const std::string& message, messages)
{
messageset_size += message_format_header_size + message.length();
}
// Packet format is ... packet size (4 bytes)
encoder_helper::raw(stream, htonl(2 + 2 + topic.size() + 4 + 4 + messageset_size));
// ... magic number (2 bytes)
encoder_helper::raw(stream, htons(kafka_format_version));
// ... topic string size (2 bytes) & topic string
encoder_helper::raw(stream, htons(topic.size()));
stream << topic;
// ... partition (4 bytes)
encoder_helper::raw(stream, htonl(partition));
// ... message set size (4 bytes) and message set
encoder_helper::raw(stream, htonl(messageset_size));
BOOST_FOREACH(const std::string& message, messages)
{
encoder_helper::message(stream, message);
}
}
}
#endif /* KAFKA_ENCODER_HPP_ */
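// A minimal usage sketch (an editorial addition, not part of the original
// file; assumes this header and Boost are on the include path):
//
//   #include <sstream>
//   #include <vector>
//   std::vector<std::string> messages;
//   messages.push_back("hello kafka");
//   std::ostringstream buffer;
//   kafkaconnect::encode(buffer, "demo-topic", 0, messages);
//   // buffer.str() now holds: total size (4) | version (2) | topic length (2)
//   // | topic | partition (4) | message set size (4) | message set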

View File

@ -0,0 +1,63 @@
/*
* encoder_helper.hpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#ifndef KAFKA_ENCODER_HELPER_HPP_
#define KAFKA_ENCODER_HELPER_HPP_
#include <ostream>
#include <string>
#include <arpa/inet.h>
#include <boost/crc.hpp>
#include <stdint.h>
namespace kafkaconnect {
namespace test { class encoder_helper; }
const uint16_t kafka_format_version = 0;
const uint8_t message_format_magic_number = 0;
const uint8_t message_format_extra_data_size = 1 + 4;
const uint8_t message_format_header_size = message_format_extra_data_size + 4;
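// On the wire each message is: size (4 bytes) | magic (1 byte) | crc32 (4 bytes)
// | payload, so message_format_header_size (9) covers everything but the payload.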
class encoder_helper
{
private:
friend class test::encoder_helper;
template <typename T> friend void encode(std::ostream&, const std::string&, const uint32_t, const T&);
static std::ostream& message(std::ostream& stream, const std::string message)
{
// Message format is ... message & data size (4 bytes)
raw(stream, htonl(message_format_extra_data_size + message.length()));
// ... magic number (1 byte)
stream << message_format_magic_number;
// ... string crc32 (4 bytes)
boost::crc_32_type result;
result.process_bytes(message.c_str(), message.length());
raw(stream, htonl(result.checksum()));
// ... message string bytes
stream << message;
return stream;
}
template <typename Data>
static std::ostream& raw(std::ostream& stream, const Data& data)
{
stream.write(reinterpret_cast<const char*>(&data), sizeof(Data));
return stream;
}
};
}
#endif /* KAFKA_ENCODER_HELPER_HPP_ */

View File

@ -0,0 +1,38 @@
#include <exception>
#include <cstdlib>
#include <iostream>
#include <string>
#include <boost/thread.hpp>
#include "producer.hpp"
int main(int argc, char* argv[])
{
std::string hostname = (argc >= 2) ? argv[1] : "localhost";
std::string port = (argc >= 3) ? argv[2] : "9092";
boost::asio::io_service io_service;
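// The work object below keeps io_service::run() from returning until it is reset.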
std::auto_ptr<boost::asio::io_service::work> work(new boost::asio::io_service::work(io_service));
boost::thread bt(boost::bind(&boost::asio::io_service::run, &io_service));
kafkaconnect::producer producer(io_service);
producer.connect(hostname, port);
while(!producer.is_connected())
{
boost::this_thread::sleep(boost::posix_time::seconds(1));
}
std::vector<std::string> messages;
messages.push_back("So long and thanks for all the fish");
messages.push_back("Time is an illusion. Lunchtime doubly so.");
producer.send(messages, "test");
work.reset();
io_service.stop();
return EXIT_SUCCESS;
}

View File

@ -0,0 +1,100 @@
/*
* producer.cpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#include <boost/lexical_cast.hpp>
#include "producer.hpp"
namespace kafkaconnect {
producer::producer(boost::asio::io_service& io_service, const error_handler_function& error_handler)
: _connected(false)
, _resolver(io_service)
, _socket(io_service)
, _error_handler(error_handler)
{
}
producer::~producer()
{
close();
}
void producer::connect(const std::string& hostname, const uint16_t port)
{
connect(hostname, boost::lexical_cast<std::string>(port));
}
void producer::connect(const std::string& hostname, const std::string& servicename)
{
boost::asio::ip::tcp::resolver::query query(hostname, servicename);
_resolver.async_resolve(
query,
boost::bind(
&producer::handle_resolve, this,
boost::asio::placeholders::error, boost::asio::placeholders::iterator
)
);
}
void producer::close()
{
_connected = false;
_socket.close();
}
bool producer::is_connected() const
{
return _connected;
}
void producer::handle_resolve(const boost::system::error_code& error_code, boost::asio::ip::tcp::resolver::iterator endpoints)
{
if (!error_code)
{
boost::asio::ip::tcp::endpoint endpoint = *endpoints;
_socket.async_connect(
endpoint,
boost::bind(
&producer::handle_connect, this,
boost::asio::placeholders::error, ++endpoints
)
);
}
else { fail_fast_error_handler(error_code); }
}
void producer::handle_connect(const boost::system::error_code& error_code, boost::asio::ip::tcp::resolver::iterator endpoints)
{
if (!error_code)
{
// The connection was successful. Send the request.
_connected = true;
}
else if (endpoints != boost::asio::ip::tcp::resolver::iterator())
{
// TODO: handle connection error (we might not need this as we have others though?)
// The connection failed, but we have more potential endpoints so throw it back to handle resolve
_socket.close();
handle_resolve(boost::system::error_code(), endpoints);
}
else { fail_fast_error_handler(error_code); }
}
void producer::handle_write_request(const boost::system::error_code& error_code, boost::asio::streambuf* buffer)
{
if (error_code)
{
fail_fast_error_handler(error_code);
}
delete buffer;
}
}

View File

@ -0,0 +1,99 @@
/*
* producer.hpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#ifndef KAFKA_PRODUCER_HPP_
#define KAFKA_PRODUCER_HPP_
#include <string>
#include <vector>
#include <boost/array.hpp>
#include <boost/asio.hpp>
#include <boost/bind.hpp>
#include <boost/function.hpp>
#include <stdint.h>
#include "encoder.hpp"
namespace kafkaconnect {
const uint32_t use_random_partition = 0xFFFFFFFF;
class producer
{
public:
typedef boost::function<void(const boost::system::error_code&)> error_handler_function;
producer(boost::asio::io_service& io_service, const error_handler_function& error_handler = error_handler_function());
~producer();
void connect(const std::string& hostname, const uint16_t port);
void connect(const std::string& hostname, const std::string& servicename);
void close();
bool is_connected() const;
bool send(const std::string& message, const std::string& topic, const uint32_t partition = kafkaconnect::use_random_partition)
{
boost::array<std::string, 1> messages = { { message } };
return send(messages, topic, partition);
}
// TODO: replace this with sending the pre-encoded buffered data, so encode is called prior to send; this will allow decoupling from the encoder
template <typename List>
bool send(const List& messages, const std::string& topic, const uint32_t partition = kafkaconnect::use_random_partition)
{
if (!is_connected())
{
return false;
}
// TODO: make this more efficient with memory allocations.
boost::asio::streambuf* buffer = new boost::asio::streambuf();
std::ostream stream(buffer);
kafkaconnect::encode(stream, topic, partition, messages);
boost::asio::async_write(
_socket, *buffer,
boost::bind(&producer::handle_write_request, this, boost::asio::placeholders::error, buffer)
);
return true;
}
private:
bool _connected;
boost::asio::ip::tcp::resolver _resolver;
boost::asio::ip::tcp::socket _socket;
error_handler_function _error_handler;
void handle_resolve(const boost::system::error_code& error_code, boost::asio::ip::tcp::resolver::iterator endpoints);
void handle_connect(const boost::system::error_code& error_code, boost::asio::ip::tcp::resolver::iterator endpoints);
void handle_write_request(const boost::system::error_code& error_code, boost::asio::streambuf* buffer);
/* Fail Fast Error Handler Braindump
*
* If an error handler is not provided in the constructor then the default response is to throw
* back the boost error_code from asio as a boost system_error exception.
*
* Most likely this will cause whatever thread you have processing boost io to terminate unless caught.
* This is great on debug systems or anything where you use io polling to process any outstanding io,
* however if your io thread is separate and not monitored it is recommended to pass a handler to
* the constructor.
*/
inline void fail_fast_error_handler(const boost::system::error_code& error_code)
{
if(_error_handler.empty()) { throw boost::system::system_error(error_code); }
else { _error_handler(error_code); }
}
};
}
#endif /* KAFKA_PRODUCER_HPP_ */
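// A usage sketch with a custom error handler (an editorial addition, not part
// of the original file; the names below are hypothetical):
//
//   void on_error(const boost::system::error_code& ec)
//   {
//       std::cerr << "producer error: " << ec.message() << std::endl;
//   }
//
//   boost::asio::io_service io_service;
//   kafkaconnect::producer producer(io_service, &on_error);
//   producer.connect("localhost", 9092);
//   // once is_connected() returns true:
//   producer.send("so long", "demo-topic");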

View File

@ -0,0 +1,71 @@
/*
* encoder_helper_tests.cpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE kafkaconnect
#include <boost/test/unit_test.hpp>
#include <arpa/inet.h>
#include "../encoder_helper.hpp"
// test wrapper
namespace kafkaconnect { namespace test {
class encoder_helper {
public:
static std::ostream& message(std::ostream& stream, const std::string message) { return kafkaconnect::encoder_helper::message(stream, message); }
template <typename T> static std::ostream& raw(std::ostream& stream, const T& t) { return kafkaconnect::encoder_helper::raw(stream, t); }
};
} }
using namespace kafkaconnect::test;
BOOST_AUTO_TEST_SUITE(kafka_encoder_helper)
BOOST_AUTO_TEST_CASE(encode_raw_char)
{
std::ostringstream stream;
char value = 0x1;
encoder_helper::raw(stream, value);
BOOST_CHECK_EQUAL(stream.str().length(), 1);
BOOST_CHECK_EQUAL(stream.str().at(0), value);
}
BOOST_AUTO_TEST_CASE(encode_raw_integer)
{
std::ostringstream stream;
int value = 0x10203;
encoder_helper::raw(stream, htonl(value));
BOOST_CHECK_EQUAL(stream.str().length(), 4);
BOOST_CHECK_EQUAL(stream.str().at(0), 0);
BOOST_CHECK_EQUAL(stream.str().at(1), 0x1);
BOOST_CHECK_EQUAL(stream.str().at(2), 0x2);
BOOST_CHECK_EQUAL(stream.str().at(3), 0x3);
}
BOOST_AUTO_TEST_CASE(encode_message)
{
std::string message = "a simple test";
std::ostringstream stream;
encoder_helper::message(stream, message);
BOOST_CHECK_EQUAL(stream.str().length(), kafkaconnect::message_format_header_size + message.length());
BOOST_CHECK_EQUAL(stream.str().at(3), 5 + message.length());
BOOST_CHECK_EQUAL(stream.str().at(4), kafkaconnect::message_format_magic_number);
for(size_t i = 0; i < message.length(); ++i)
{
BOOST_CHECK_EQUAL(stream.str().at(9 + i), message.at(i));
}
}
BOOST_AUTO_TEST_SUITE_END()

View File

@ -0,0 +1,52 @@
/*
* encoder_tests.cpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE kafkaconnect
#include <boost/test/unit_test.hpp>
#include <string>
#include <vector>
#include "../encoder.hpp"
BOOST_AUTO_TEST_CASE(single_message_test)
{
std::ostringstream stream;
std::vector<std::string> messages;
messages.push_back("test message");
kafkaconnect::encode(stream, "topic", 1, messages);
BOOST_CHECK_EQUAL(stream.str().length(), 4 + 2 + 2 + strlen("topic") + 4 + 4 + 9 + strlen("test message"));
BOOST_CHECK_EQUAL(stream.str().at(3), 2 + 2 + strlen("topic") + 4 + 4 + 9 + strlen("test message"));
BOOST_CHECK_EQUAL(stream.str().at(5), 0);
BOOST_CHECK_EQUAL(stream.str().at(7), strlen("topic"));
BOOST_CHECK_EQUAL(stream.str().at(8), 't');
BOOST_CHECK_EQUAL(stream.str().at(8 + strlen("topic") - 1), 'c');
BOOST_CHECK_EQUAL(stream.str().at(11 + strlen("topic")), 1);
BOOST_CHECK_EQUAL(stream.str().at(15 + strlen("topic")), 9 + strlen("test message"));
BOOST_CHECK_EQUAL(stream.str().at(16 + strlen("topic")), 0);
BOOST_CHECK_EQUAL(stream.str().at(25 + strlen("topic")), 't');
}
BOOST_AUTO_TEST_CASE(multiple_message_test)
{
std::ostringstream stream;
std::vector<std::string> messages;
messages.push_back("test message");
messages.push_back("another message to check");
kafkaconnect::encode(stream, "topic", 1, messages);
BOOST_CHECK_EQUAL(stream.str().length(), 4 + 2 + 2 + strlen("topic") + 4 + 4 + 9 + strlen("test message") + 9 + strlen("another message to check"));
BOOST_CHECK_EQUAL(stream.str().at(3), 2 + 2 + strlen("topic") + 4 + 4 + 9 + strlen("test message") + 9 + strlen("another message to check"));
BOOST_CHECK_EQUAL(stream.str().at(15 + strlen("topic")), 9 + strlen("test message") + 9 + strlen("another message to check"));
}

View File

@ -0,0 +1,59 @@
/*
* producer_tests.cpp
*
* Created on: 21 Jun 2011
* Author: Ben Gray (@benjamg)
*/
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE kafkaconnect
#include <boost/test/unit_test.hpp>
#include <memory>
#include <boost/thread.hpp>
#include "../producer.hpp"
BOOST_AUTO_TEST_CASE(basic_message_test)
{
boost::asio::io_service io_service;
std::auto_ptr<boost::asio::io_service::work> work(new boost::asio::io_service::work(io_service));
boost::asio::ip::tcp::acceptor acceptor(io_service, boost::asio::ip::tcp::endpoint(boost::asio::ip::tcp::v4(), 12345));
boost::thread bt(boost::bind(&boost::asio::io_service::run, &io_service));
kafkaconnect::producer producer(io_service);
BOOST_CHECK_EQUAL(producer.is_connected(), false);
producer.connect("localhost", 12345);
boost::asio::ip::tcp::socket socket(io_service);
acceptor.accept(socket);
while(!producer.is_connected())
{
boost::this_thread::sleep(boost::posix_time::seconds(1));
}
std::vector<std::string> messages;
messages.push_back("so long and thanks for all the fish");
producer.send(messages, "mice", 42);
boost::array<char, 1024> buffer;
boost::system::error_code error;
size_t len = socket.read_some(boost::asio::buffer(buffer), error);
BOOST_CHECK_EQUAL(len, 4 + 2 + 2 + strlen("mice") + 4 + 4 + 9 + strlen("so long and thanks for all the fish"));
BOOST_CHECK_EQUAL(buffer[3], 2 + 2 + strlen("mice") + 4 + 4 + 9 + strlen("so long and thanks for all the fish"));
BOOST_CHECK_EQUAL(buffer[5], 0);
BOOST_CHECK_EQUAL(buffer[7], strlen("mice"));
BOOST_CHECK_EQUAL(buffer[8], 'm');
BOOST_CHECK_EQUAL(buffer[8 + strlen("mice") - 1], 'e');
BOOST_CHECK_EQUAL(buffer[11 + strlen("mice")], 42);
BOOST_CHECK_EQUAL(buffer[15 + strlen("mice")], 9 + strlen("so long and thanks for all the fish"));
BOOST_CHECK_EQUAL(buffer[16 + strlen("mice")], 0);
BOOST_CHECK_EQUAL(buffer[25 + strlen("mice")], 's');
work.reset();
io_service.stop();
}

5
clients/csharp/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
StyleCop.Cache
bin
obj
*.suo
*.csproj.user

202
clients/csharp/LICENSE Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 LinkedIn
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

66
clients/csharp/README.md Normal file
View File

@ -0,0 +1,66 @@
# .NET Kafka Client
This is a .NET implementation of a client for Kafka using C#. It provides a basic implementation covering the core functionality, including a simple Producer and Consumer.
The .NET client wraps Kafka server error codes in the `KafkaException` class. Exceptions are not trapped within the library and bubble up directly from the TcpClient and its underlying Socket connection. Clients using this library should do their own exception handling for these kinds of errors.
## Producer
The Producer can send one or more messages to Kafka in both a synchronous and asynchronous fashion.
### Producer Usage
    string payload1 = "kafka 1.";
    byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
    Message msg1 = new Message(payloadData1);

    string payload2 = "kafka 2.";
    byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
    Message msg2 = new Message(payloadData2);

    Producer producer = new Producer("localhost", 9092);
    producer.Send("test", 0, new List<Message> { msg1, msg2 });
### Asynchronous Producer Usage
    List<Message> messages = GetBunchOfMessages();

    Producer producer = new Producer("localhost", 9092);
    producer.SendAsync("test", 0, messages, (requestContext) => { /* doing work */ });
### Multi-Producer Usage
    List<ProducerRequest> requests = new List<ProducerRequest>
    {
        new ProducerRequest("test a", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("1: " + DateTime.UtcNow)) }),
        new ProducerRequest("test b", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("2: " + DateTime.UtcNow)) }),
        new ProducerRequest("test c", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("3: " + DateTime.UtcNow)) }),
        new ProducerRequest("test d", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("4: " + DateTime.UtcNow)) })
    };

    MultiProducerRequest request = new MultiProducerRequest(requests);
    Producer producer = new Producer("localhost", 9092);
    producer.Send(request);
## Consumer
The consumer has two functions of interest: `GetOffsetsBefore` and `Consume`. `GetOffsetsBefore` retrieves a list of offsets before a given time, and `Consume` attempts to get a list of messages from Kafka given a topic, partition and offset. `Consume` supports both single and batched fetches via `MultiFetchRequest`.
### Consumer Usage
    Consumer consumer = new Consumer("localhost", 9092);
    int max = 10;
    long[] offsets = consumer.GetOffsetsBefore("test", 0, OffsetRequest.LatestTime, max);
    List<Message> messages = consumer.Consume("test", 0, offsets[0]);
### Consumer Multi-fetch
    Consumer consumer = new Consumer("localhost", 9092);
    MultiFetchRequest request = new MultiFetchRequest(new List<FetchRequest>
    {
        new FetchRequest("testa", 0, 0),
        new FetchRequest("testb", 0, 0),
        new FetchRequest("testc", 0, 0)
    });
    List<List<Message>> messages = consumer.Consume(request);

View File

@ -0,0 +1,76 @@
<StyleCopSettings Version="4.3">
<Parsers>
<Parser ParserId="Microsoft.StyleCop.CSharp.CsParser">
<ParserSettings>
<BooleanProperty Name="AnalyzeDesignerFiles">False</BooleanProperty>
</ParserSettings>
</Parser>
</Parsers>
<Analyzers>
<Analyzer AnalyzerId="Microsoft.StyleCop.CSharp.NamingRules">
<Rules>
<Rule Name="FieldNamesMustNotBeginWithUnderscore">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
</Rules>
<AnalyzerSettings />
</Analyzer>
<Analyzer AnalyzerId="Microsoft.StyleCop.CSharp.DocumentationRules">
<Rules>
<Rule Name="FileMustHaveHeader">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
<Rule Name="FileHeaderMustShowCopyright">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
<Rule Name="FileHeaderMustHaveCopyrightText">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
<Rule Name="FileHeaderMustContainFileName">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
<Rule Name="FileHeaderFileNameDocumentationMustMatchFileName">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
<Rule Name="FileHeaderMustHaveValidCompanyText">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
</Rules>
<AnalyzerSettings />
</Analyzer>
<Analyzer AnalyzerId="Microsoft.StyleCop.CSharp.OrderingRules">
<Rules>
<Rule Name="UsingDirectivesMustBePlacedWithinNamespace">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
</Rules>
<AnalyzerSettings />
</Analyzer>
<Analyzer AnalyzerId="Microsoft.StyleCop.CSharp.ReadabilityRules">
<Rules>
<Rule Name="PrefixLocalCallsWithThis">
<RuleSettings>
<BooleanProperty Name="Enabled">False</BooleanProperty>
</RuleSettings>
</Rule>
</Rules>
<AnalyzerSettings />
</Analyzer>
</Analyzers>
</StyleCopSettings>

Binary file not shown.

View File

@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Kafka.Client
{
/// <summary>
/// Base request to make to Kafka.
/// </summary>
public abstract class AbstractRequest
{
/// <summary>
/// Gets or sets the topic to publish to.
/// </summary>
public string Topic { get; set; }
/// <summary>
/// Gets or sets the partition to publish to.
/// </summary>
public int Partition { get; set; }
/// <summary>
/// Converts the request to an array of bytes that is expected by Kafka.
/// </summary>
/// <returns>An array of bytes that represents the request.</returns>
public abstract byte[] GetBytes();
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public abstract bool IsValid();
}
}

View File

@ -0,0 +1,232 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Request;
using Kafka.Client.Util;
namespace Kafka.Client
{
/// <summary>
/// Consumes messages from Kafka.
/// </summary>
public class Consumer
{
/// <summary>
/// Maximum size.
/// </summary>
private static readonly int MaxSize = 1048576;
/// <summary>
/// Initializes a new instance of the Consumer class.
/// </summary>
/// <param name="server">The server to connect to.</param>
/// <param name="port">The port to connect to.</param>
public Consumer(string server, int port)
{
Server = server;
Port = port;
}
/// <summary>
/// Gets the server to which the connection is to be established.
/// </summary>
public string Server { get; private set; }
/// <summary>
/// Gets the port to which the connection is to be established.
/// </summary>
public int Port { get; private set; }
/// <summary>
/// Consumes messages from Kafka.
/// </summary>
/// <param name="topic">The topic to consume from.</param>
/// <param name="partition">The partition to consume from.</param>
/// <param name="offset">The offset to start at.</param>
/// <returns>A list of messages from Kafka.</returns>
public List<Message> Consume(string topic, int partition, long offset)
{
return Consume(topic, partition, offset, MaxSize);
}
/// <summary>
/// Consumes messages from Kafka.
/// </summary>
/// <param name="topic">The topic to consume from.</param>
/// <param name="partition">The partition to consume from.</param>
/// <param name="offset">The offset to start at.</param>
/// <param name="maxSize">The maximum size.</param>
/// <returns>A list of messages from Kafka.</returns>
public List<Message> Consume(string topic, int partition, long offset, int maxSize)
{
return Consume(new FetchRequest(topic, partition, offset, maxSize));
}
/// <summary>
/// Consumes messages from Kafka.
/// </summary>
/// <param name="request">The request to send to Kafka.</param>
/// <returns>A list of messages from Kafka.</returns>
public List<Message> Consume(FetchRequest request)
{
List<Message> messages = new List<Message>();
using (KafkaConnection connection = new KafkaConnection(Server, Port))
{
connection.Write(request.GetBytes());
int dataLength = BitConverter.ToInt32(BitWorks.ReverseBytes(connection.Read(4)), 0);
if (dataLength > 0)
{
byte[] data = connection.Read(dataLength);
int errorCode = BitConverter.ToInt16(BitWorks.ReverseBytes(data.Take(2).ToArray<byte>()), 0);
if (errorCode != KafkaException.NoError)
{
throw new KafkaException(errorCode);
}
// skip the error code and process the rest
byte[] unbufferedData = data.Skip(2).ToArray();
int processed = 0;
int length = unbufferedData.Length - 4;
int messageSize = 0;
while (processed <= length)
{
messageSize = BitConverter.ToInt32(BitWorks.ReverseBytes(unbufferedData.Skip(processed).Take(4).ToArray<byte>()), 0);
messages.Add(Message.ParseFrom(unbufferedData.Skip(processed).Take(messageSize + 4).ToArray<byte>()));
processed += 4 + messageSize;
}
}
}
return messages;
}
/// <summary>
/// Executes a multi-fetch operation.
/// </summary>
/// <param name="request">The multi-fetch request to send to Kafka.</param>
/// <returns>
/// A list containing sets of messages. The message sets should match the request order.
/// </returns>
public List<List<Message>> Consume(MultiFetchRequest request)
{
int fetchRequests = request.ConsumerRequests.Count;
List<List<Message>> messages = new List<List<Message>>();
using (KafkaConnection connection = new KafkaConnection(Server, Port))
{
connection.Write(request.GetBytes());
int dataLength = BitConverter.ToInt32(BitWorks.ReverseBytes(connection.Read(4)), 0);
if (dataLength > 0)
{
byte[] data = connection.Read(dataLength);
int position = 0;
int errorCode = BitConverter.ToInt16(BitWorks.ReverseBytes(data.Take(2).ToArray<byte>()), 0);
if (errorCode != KafkaException.NoError)
{
throw new KafkaException(errorCode);
}
// skip the error code and process the rest
position = position + 2;
for (int ix = 0; ix < fetchRequests; ix++)
{
messages.Add(new List<Message>());
int messageSetSize = BitConverter.ToInt32(BitWorks.ReverseBytes(data.Skip(position).Take(4).ToArray<byte>()), 0);
position = position + 4;
errorCode = BitConverter.ToInt16(BitWorks.ReverseBytes(data.Skip(position).Take(2).ToArray<byte>()), 0);
if (errorCode != KafkaException.NoError)
{
throw new KafkaException(errorCode);
}
// skip the error code and process the rest
position = position + 2;
byte[] messageSetBytes = data.Skip(position).ToArray<byte>().Take(messageSetSize).ToArray<byte>();
int processed = 0;
int messageSize = 0;
// dropped 2 bytes at the end...padding???
while (processed < messageSetBytes.Length - 2)
{
messageSize = BitConverter.ToInt32(BitWorks.ReverseBytes(messageSetBytes.Skip(processed).Take(4).ToArray<byte>()), 0);
messages[ix].Add(Message.ParseFrom(messageSetBytes.Skip(processed).Take(messageSize + 4).ToArray<byte>()));
processed += 4 + messageSize;
}
position = position + processed;
}
}
}
return messages;
}
/// <summary>
/// Gets a list of valid offsets (up to maxNumOffsets) before the given time.
/// </summary>
/// <param name="topic">The topic to check.</param>
/// <param name="partition">The partition on the topic.</param>
/// <param name="time">The time in milliseconds (-1 requests offsets from the latest available).</param>
/// <param name="maxNumOffsets">The maximum number of offsets to return.</param>
/// <returns>List of offsets, in descending order.</returns>
public IList<long> GetOffsetsBefore(string topic, int partition, long time, int maxNumOffsets)
{
return GetOffsetsBefore(new OffsetRequest(topic, partition, time, maxNumOffsets));
}
/// <summary>
/// Gets a list of valid offsets (up to the requested maximum) before the given time.
/// </summary>
/// <param name="request">The offset request.</param>
/// <returns>List of offsets, in descending order.</returns>
public IList<long> GetOffsetsBefore(OffsetRequest request)
{
List<long> offsets = new List<long>();
using (KafkaConnection connection = new KafkaConnection(Server, Port))
{
connection.Write(request.GetBytes());
int dataLength = BitConverter.ToInt32(BitWorks.ReverseBytes(connection.Read(4)), 0);
if (dataLength > 0)
{
byte[] data = connection.Read(dataLength);
int errorCode = BitConverter.ToInt16(BitWorks.ReverseBytes(data.Take(2).ToArray<byte>()), 0);
if (errorCode != KafkaException.NoError)
{
throw new KafkaException(errorCode);
}
// skip the error code and process the rest
byte[] unbufferedData = data.Skip(2).ToArray();
// first four bytes are the number of offsets
int numOfOffsets = BitConverter.ToInt32(BitWorks.ReverseBytes(unbufferedData.Take(4).ToArray<byte>()), 0);
int position = 0;
for (int ix = 0; ix < numOfOffsets; ix++)
{
position = (ix * 8) + 4;
offsets.Add(BitConverter.ToInt64(BitWorks.ReverseBytes(unbufferedData.Skip(position).Take(8).ToArray<byte>()), 0));
}
}
}
return offsets;
}
}
}
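
For orientation, a minimal consumption sketch (not part of this commit): the broker address, port, and topic below are illustrative assumptions, and the call blocks until the fetch response arrives.

    using System;
    using System.Collections.Generic;
    using Kafka.Client;

    public class ConsumerExample
    {
        public static void Main()
        {
            // "localhost", 9092, and "test" are placeholder connection details.
            Consumer consumer = new Consumer("localhost", 9092);

            // Fetch up to the default 1 MB of messages from partition 0, starting at offset 0.
            List<Message> messages = consumer.Consume("test", 0, 0L);
            foreach (Message message in messages)
            {
                Console.WriteLine(message); // ToString() decodes the payload as UTF-8
            }
        }
    }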

View File

@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Kafka.Client</RootNamespace>
<AssemblyName>Kafka.Client</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="AbstractRequest.cs" />
<Compile Include="Consumer.cs" />
<Compile Include="KafkaException.cs" />
<Compile Include="RequestContext.cs" />
<Compile Include="Request\FetchRequest.cs" />
<Compile Include="Request\MultiFetchRequest.cs" />
<Compile Include="Request\MultiProducerRequest.cs" />
<Compile Include="Request\OffsetRequest.cs" />
<Compile Include="Request\ProducerRequest.cs" />
<Compile Include="Util\Crc32.cs" />
<Compile Include="KafkaConnection.cs" />
<Compile Include="Message.cs" />
<Compile Include="Producer.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="RequestType.cs" />
<Compile Include="Util\BitWorks.cs" />
</ItemGroup>
<ItemGroup />
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

View File

@@ -0,0 +1,204 @@
using System;
using System.Net.Sockets;
using System.Threading;
using Kafka.Client.Request;
namespace Kafka.Client
{
/// <summary>
/// Callback made when a message request is finished being sent asynchronously.
/// </summary>
/// <typeparam name="T">
/// Must be of type <see cref="AbstractRequest"/> and represents the type of message
/// sent to Kafka.
/// </typeparam>
/// <param name="request">The request that was sent to the server.</param>
public delegate void MessageSent<T>(RequestContext<T> request) where T : AbstractRequest;
/// <summary>
/// Manages connections to the Kafka server.
/// </summary>
public class KafkaConnection : IDisposable
{
/// <summary>
/// TCP client that connects to the server.
/// </summary>
private TcpClient _client;
/// <summary>
/// Initializes a new instance of the KafkaConnection class.
/// </summary>
/// <param name="server">The server to connect to.</param>
/// <param name="port">The port to connect to.</param>
public KafkaConnection(string server, int port)
{
Server = server;
Port = port;
// connection opened
_client = new TcpClient(server, port);
}
/// <summary>
/// Gets the server to which the connection is to be established.
/// </summary>
public string Server { get; private set; }
/// <summary>
/// Gets the port to which the connection is to be established.
/// </summary>
public int Port { get; private set; }
/// <summary>
/// Reads data from the server.
/// </summary>
/// <remarks>
/// Defaults the amount of time that a read operation blocks waiting for data to <see cref="Timeout.Infinite"/>.
/// </remarks>
/// <param name="size">The number of bytes to read from the server.</param>
/// <returns>The data read from the server as a byte array.</returns>
public byte[] Read(int size)
{
return Read(size, Timeout.Infinite);
}
/// <summary>
/// Reads data from the server.
/// </summary>
/// <param name="size">The number of bytes to read from the server.</param>
/// <param name="readTimeout">The amount of time that a read operation blocks waiting for data.</param>
/// <returns>The data read from the server as a byte array.</returns>
public byte[] Read(int size, int readTimeout)
{
NetworkStream stream = _client.GetStream();
stream.ReadTimeout = readTimeout;
byte[] bytes = new byte[size];
int totalRead = 0;
int numberOfTries = 0;
while (totalRead < size && numberOfTries < 1000)
{
if (stream.DataAvailable)
{
// Read may return fewer bytes than requested, so accumulate until the buffer is full.
totalRead += stream.Read(bytes, totalRead, size - totalRead);
}
else
{
// wait until the server is ready to send some stuff.
numberOfTries++;
Thread.Sleep(10);
}
}
return bytes;
}
/// <summary>
/// Writes a producer request to the server asynchronously.
/// </summary>
/// <param name="request">The request to make.</param>
/// <param name="callback">The code to execute once the message is completely sent.</param>
public void BeginWrite(ProducerRequest request, MessageSent<ProducerRequest> callback)
{
NetworkStream stream = _client.GetStream();
RequestContext<ProducerRequest> ctx = new RequestContext<ProducerRequest>(stream, request);
byte[] data = request.GetBytes();
stream.BeginWrite(
data,
0,
data.Length,
delegate(IAsyncResult asyncResult)
{
RequestContext<ProducerRequest> context = (RequestContext<ProducerRequest>)asyncResult.AsyncState;
if (callback != null)
{
callback(context);
}
context.NetworkStream.EndWrite(asyncResult);
context.NetworkStream.Dispose();
},
ctx);
}
/// <summary>
/// Writes a producer request to the server asynchronously.
/// </summary>
/// <remarks>
/// The default callback simply calls the <see cref="NetworkStream.EndWrite"/>. This is
/// basically a low level fire and forget call.
/// </remarks>
/// <param name="data">The data to send to the server.</param>
public void BeginWrite(byte[] data)
{
NetworkStream stream = _client.GetStream();
stream.BeginWrite(data, 0, data.Length, (asyncResult) => ((NetworkStream)asyncResult.AsyncState).EndWrite(asyncResult), stream);
}
/// <summary>
/// Writes data to the server.
/// </summary>
/// <remarks>
/// Write timeout is defaulted to infinite.
/// </remarks>
/// <param name="data">The data to write to the server.</param>
public void Write(byte[] data)
{
Write(data, Timeout.Infinite);
}
/// <summary>
/// Writes a producer request to the server.
/// </summary>
/// <remarks>
/// Write timeout is defaulted to infinite.
/// </remarks>
/// <param name="request">The <see cref="ProducerRequest"/> to send to the server.</param>
public void Write(ProducerRequest request)
{
Write(request.GetBytes());
}
/// <summary>
/// Writes a multi-producer request to the server.
/// </summary>
/// <remarks>
/// Write timeout is defaulted to infinite.
/// </remarks>
/// <param name="request">The <see cref="MultiProducerRequest"/> to send to the server.</param>
public void Write(MultiProducerRequest request)
{
Write(request.GetBytes());
}
/// <summary>
/// Writes data to the server.
/// </summary>
/// <param name="data">The data to write to the server.</param>
/// <param name="writeTimeout">The amount of time that a write operation blocks waiting for data.</param>
public void Write(byte[] data, int writeTimeout)
{
NetworkStream stream = _client.GetStream();
stream.WriteTimeout = writeTimeout;
// Send the message to the connected TcpServer.
stream.Write(data, 0, data.Length);
}
/// <summary>
/// Close the connection to the server.
/// </summary>
public void Dispose()
{
if (_client != null)
{
_client.GetStream().Close();
_client.Close();
}
}
}
}
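
The reads and writes above assume Kafka's length-prefixed framing: every request begins with a 4-byte big-endian length, followed by a 2-byte request type. A small sketch of that layout, using only types from this commit (no broker needed):

    using System;
    using System.Linq;
    using Kafka.Client.Request;
    using Kafka.Client.Util;

    public class FramingExample
    {
        public static void Main()
        {
            byte[] request = new FetchRequest("test", 0, 0L).GetBytes();

            // First four bytes: big-endian length of everything that follows.
            int declaredLength = BitConverter.ToInt32(
                BitWorks.ReverseBytes(request.Take(4).ToArray()), 0);
            Console.WriteLine("declared: {0}, actual: {1}", declaredLength, request.Length - 4);

            // Next two bytes: the request type (1 == RequestType.Fetch).
            short requestType = BitConverter.ToInt16(
                BitWorks.ReverseBytes(request.Skip(4).Take(2).ToArray()), 0);
            Console.WriteLine("request type: {0}", requestType);
        }
    }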

View File

@@ -0,0 +1,81 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Kafka.Client
{
/// <summary>
/// A wrapping of an error code returned from Kafka.
/// </summary>
public class KafkaException : Exception
{
/// <summary>
/// No error occurred.
/// </summary>
public const int NoError = 0;
/// <summary>
/// The offset requested was out of range.
/// </summary>
public const int OffsetOutOfRangeCode = 1;
/// <summary>
/// The message was invalid.
/// </summary>
public const int InvalidMessageCode = 2;
/// <summary>
/// The wrong partition.
/// </summary>
public const int WrongPartitionCode = 3;
/// <summary>
/// Invalid fetch size.
/// </summary>
public const int InvalidFetchSizeCode = 4;
/// <summary>
/// Initializes a new instance of the KafkaException class.
/// </summary>
/// <param name="errorCode">The error code generated by a request to Kafka.</param>
public KafkaException(int errorCode) : base(GetMessage(errorCode))
{
ErrorCode = errorCode;
}
/// <summary>
/// Gets the error code that was sent from Kafka.
/// </summary>
public int ErrorCode { get; private set; }
/// <summary>
/// Gets the message for the exception based on the Kafka error code.
/// </summary>
/// <param name="errorCode">The error code from Kafka.</param>
/// <returns>A string message representing the error code.</returns>
private static string GetMessage(int errorCode)
{
if (errorCode == OffsetOutOfRangeCode)
{
return "Offset out of range";
}
else if (errorCode == InvalidMessageCode)
{
return "Invalid message";
}
else if (errorCode == WrongPartitionCode)
{
return "Wrong partition";
}
else if (errorCode == InvalidFetchSizeCode)
{
return "Invalid fetch size";
}
}
else
{
return "Unknown error";
}
}
}
}

View File

@@ -0,0 +1,140 @@
using System;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client
{
/// <summary>
/// Message for Kafka.
/// </summary>
/// <remarks>
/// A message. The format of an N byte message is the following:
/// <list type="bullet">
/// <item>
/// <description>1 byte "magic" identifier to allow format changes</description>
/// </item>
/// <item>
/// <description>4 byte CRC32 of the payload</description>
/// </item>
/// <item>
/// <description>N - 5 byte payload</description>
/// </item>
/// </list>
/// </remarks>
public class Message
{
/// <summary>
/// Magic identifier for Kafka.
/// </summary>
private static readonly byte DefaultMagicIdentifier = 0;
/// <summary>
/// Initializes a new instance of the Message class.
/// </summary>
/// <remarks>
/// Uses the <see cref="DefaultMagicIdentifier"/> as a default.
/// </remarks>
/// <param name="payload">The data for the payload.</param>
public Message(byte[] payload) : this(payload, DefaultMagicIdentifier)
{
}
/// <summary>
/// Initializes a new instance of the Message class.
/// </summary>
/// <remarks>
/// Initializes the checksum as null. It will be automatically computed.
/// </remarks>
/// <param name="payload">The data for the payload.</param>
/// <param name="magic">The magic identifier.</param>
public Message(byte[] payload, byte magic) : this(payload, magic, null)
{
}
/// <summary>
/// Initializes a new instance of the Message class.
/// </summary>
/// <param name="payload">The data for the payload.</param>
/// <param name="magic">The magic identifier.</param>
/// <param name="checksum">The checksum for the payload.</param>
public Message(byte[] payload, byte magic, byte[] checksum)
{
Payload = payload;
Magic = magic;
Checksum = checksum == null ? CalculateChecksum() : checksum;
}
/// <summary>
/// Gets the magic bytes.
/// </summary>
public byte Magic { get; private set; }
/// <summary>
/// Gets the CRC32 checksum for the payload.
/// </summary>
public byte[] Checksum { get; private set; }
/// <summary>
/// Gets the payload.
/// </summary>
public byte[] Payload { get; private set; }
/// <summary>
/// Parses a message from a byte array given the format Kafka likes.
/// </summary>
/// <param name="data">The data for a message.</param>
/// <returns>The message.</returns>
public static Message ParseFrom(byte[] data)
{
int size = BitConverter.ToInt32(BitWorks.ReverseBytes(data.Take(4).ToArray<byte>()), 0);
byte magic = data[4];
byte[] checksum = data.Skip(5).Take(4).ToArray<byte>();
byte[] payload = data.Skip(9).Take(size).ToArray<byte>();
return new Message(payload, magic, checksum);
}
/// <summary>
/// Converts the message to bytes in the format Kafka likes.
/// </summary>
/// <returns>The byte array.</returns>
public byte[] GetBytes()
{
byte[] encodedMessage = new byte[Payload.Length + 1 + Checksum.Length];
encodedMessage[0] = Magic;
Buffer.BlockCopy(Checksum, 0, encodedMessage, 1, Checksum.Length);
Buffer.BlockCopy(Payload, 0, encodedMessage, 1 + Checksum.Length, Payload.Length);
return encodedMessage;
}
/// <summary>
/// Determines if the message is valid given the payload and its checksum.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public bool IsValid()
{
return Checksum.SequenceEqual(CalculateChecksum());
}
/// <summary>
/// Try to show the payload as decoded to UTF-8.
/// </summary>
/// <returns>The decoded payload as string.</returns>
public override string ToString()
{
return Encoding.UTF8.GetString(Payload);
}
/// <summary>
/// Calculates the CRC32 checksum on the payload of the message.
/// </summary>
/// <returns>The checksum given the payload.</returns>
private byte[] CalculateChecksum()
{
Crc32 crc32 = new Crc32();
return crc32.ComputeHash(Payload);
}
}
}
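
A short sketch of the message layout described in the remarks above; note that GetBytes emits magic + checksum + payload only, while the 4-byte size prefix is written by the request classes and expected by ParseFrom:

    using System;
    using System.Text;
    using Kafka.Client;

    public class MessageExample
    {
        public static void Main()
        {
            Message message = new Message(Encoding.UTF8.GetBytes("hello"));
            byte[] bytes = message.GetBytes();

            // Layout: 1 magic byte, 4 CRC32 bytes, then the payload (5 + N bytes total).
            Console.WriteLine("magic: {0}", bytes[0]);
            Console.WriteLine("{0} bytes for a {1}-byte payload", bytes.Length, message.Payload.Length);
            Console.WriteLine("checksum valid: {0}", message.IsValid());
        }
    }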

View File

@@ -0,0 +1,135 @@
using System;
using System.Collections.Generic;
using System.Text;
using Kafka.Client.Request;
using Kafka.Client.Util;
namespace Kafka.Client
{
/// <summary>
/// Sends messages to Kafka.
/// </summary>
public class Producer
{
/// <summary>
/// Initializes a new instance of the Producer class.
/// </summary>
/// <param name="server">The server to connect to.</param>
/// <param name="port">The port to connect to.</param>
public Producer(string server, int port)
{
Server = server;
Port = port;
}
/// <summary>
/// Gets the server to which the connection is to be established.
/// </summary>
public string Server { get; private set; }
/// <summary>
/// Gets the port to which the connection is to be established.
/// </summary>
public int Port { get; private set; }
/// <summary>
/// Sends a message to Kafka.
/// </summary>
/// <param name="topic">The topic to publish to.</param>
/// <param name="partition">The partition to publish to.</param>
/// <param name="msg">The message to send.</param>
public void Send(string topic, int partition, Message msg)
{
Send(topic, partition, new List<Message> { msg });
}
/// <summary>
/// Sends a list of messages to Kafka.
/// </summary>
/// <param name="topic">The topic to publish to.</param>
/// <param name="partition">The partition to publish to.</param>
/// <param name="messages">The list of messages to send.</param>
public void Send(string topic, int partition, IList<Message> messages)
{
Send(new ProducerRequest(topic, partition, messages));
}
/// <summary>
/// Sends a request to Kafka.
/// </summary>
/// <param name="request">The request to send to Kafka.</param>
public void Send(ProducerRequest request)
{
if (request.IsValid())
{
using (KafkaConnection connection = new KafkaConnection(Server, Port))
{
connection.Write(request);
}
}
}
/// <summary>
/// Sends a request to Kafka.
/// </summary>
/// <param name="request">The request to send to Kafka.</param>
public void Send(MultiProducerRequest request)
{
if (request.IsValid())
{
using (KafkaConnection connection = new KafkaConnection(Server, Port))
{
connection.Write(request);
}
}
}
/// <summary>
/// Sends a list of messages to Kafka.
/// </summary>
/// <param name="topic">The topic to publish to.</param>
/// <param name="partition">The partition to publish to.</param>
/// <param name="messages">The list of messages to send.</param>
/// <param name="callback">
/// A block of code to execute once the request has been sent to Kafka. This value may
/// be set to null.
/// </param>
public void SendAsync(string topic, int partition, IList<Message> messages, MessageSent<ProducerRequest> callback)
{
SendAsync(new ProducerRequest(topic, partition, messages), callback);
}
/// <summary>
/// Send a request to Kafka asynchronously.
/// </summary>
/// <remarks>
/// If the callback is not specified then the method behaves as a fire-and-forget call
/// with the callback being ignored. By the time the callback is executed, the
/// <see cref="RequestContext{T}.NetworkStream"/> will already have been closed by an
/// internal call to <see cref="NetworkStream.EndWrite"/>.
/// </remarks>
/// <param name="request">The request to send to Kafka.</param>
/// <param name="callback">
/// A block of code to execute once the request has been sent to Kafka. This value may
/// be set to null.
/// </param>
public void SendAsync(ProducerRequest request, MessageSent<ProducerRequest> callback)
{
if (request.IsValid())
{
KafkaConnection connection = new KafkaConnection(Server, Port);
if (callback == null)
{
// fire and forget
connection.BeginWrite(request.GetBytes());
}
else
{
// execute with callback
connection.BeginWrite(request, callback);
}
}
}
}
}
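
A minimal producer sketch, again with placeholder broker details; the synchronous overload opens and disposes a connection per call, while passing a null callback to SendAsync gives fire-and-forget behavior:

    using System.Collections.Generic;
    using System.Text;
    using Kafka.Client;

    public class ProducerExample
    {
        public static void Main()
        {
            // "localhost", 9092, and "test" are placeholder connection details.
            Producer producer = new Producer("localhost", 9092);
            Message message = new Message(Encoding.UTF8.GetBytes("hello kafka"));

            // Synchronous send: one connection per request.
            producer.Send("test", 0, message);

            // Fire-and-forget asynchronous send (null callback).
            producer.SendAsync("test", 0, new List<Message> { message }, null);
        }
    }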

View File

@@ -0,0 +1,36 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Kafka.Client")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Kafka.Client")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("93d702e5-9998-49a8-8c16-5b04b3ba55c1")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

View File

@@ -0,0 +1,113 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client.Request
{
/// <summary>
/// Constructs a request to send to Kafka.
/// </summary>
public class FetchRequest : AbstractRequest
{
/// <summary>
/// Default maximum fetch size, in bytes.
/// </summary>
private static readonly int DefaultMaxSize = 1048576;
/// <summary>
/// Initializes a new instance of the FetchRequest class.
/// </summary>
public FetchRequest()
{
}
/// <summary>
/// Initializes a new instance of the FetchRequest class.
/// </summary>
/// <param name="topic">The topic to fetch from.</param>
/// <param name="partition">The partition to fetch from.</param>
/// <param name="offset">The offset in the topic/partition to retrieve from.</param>
public FetchRequest(string topic, int partition, long offset)
: this(topic, partition, offset, DefaultMaxSize)
{
}
/// <summary>
/// Initializes a new instance of the FetchRequest class.
/// </summary>
/// <param name="topic">The topic to fetch from.</param>
/// <param name="partition">The partition to fetch from.</param>
/// <param name="offset">The offset in the topic/partition to retrieve from.</param>
/// <param name="maxSize">The maximum size.</param>
public FetchRequest(string topic, int partition, long offset, int maxSize)
{
Topic = topic;
Partition = partition;
Offset = offset;
MaxSize = maxSize;
}
/// <summary>
/// Gets or sets the offset to request.
/// </summary>
public long Offset { get; set; }
/// <summary>
/// Gets or sets the maximum size to pass in the request.
/// </summary>
public int MaxSize { get; set; }
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public override bool IsValid()
{
return !string.IsNullOrWhiteSpace(Topic);
}
/// <summary>
/// Gets the bytes matching the expected Kafka structure.
/// </summary>
/// <returns>The byte array of the request.</returns>
public override byte[] GetBytes()
{
byte[] internalBytes = GetInternalBytes();
List<byte> request = new List<byte>();
// add the 2 for the RequestType.Fetch
request.AddRange(BitWorks.GetBytesReversed(internalBytes.Length + 2));
request.AddRange(BitWorks.GetBytesReversed((short)RequestType.Fetch));
request.AddRange(internalBytes);
return request.ToArray<byte>();
}
/// <summary>
/// Gets the bytes representing the request which is used when generating a multi-request.
/// </summary>
/// <remarks>
/// The <see cref="GetBytes"/> method is used for sending a single <see cref="RequestType.Fetch"/>.
/// It prefixes this byte array with the total request length and the request type. This method
/// is used to supply the <see cref="MultiFetchRequest"/> with the contents for its message.
/// </remarks>
/// <returns>The bytes that represent this <see cref="FetchRequest"/>.</returns>
internal byte[] GetInternalBytes()
{
// TOPIC LENGTH (2 bytes) + TOPIC + PARTITION (4) + OFFSET (8) + MAX SIZE (4)
List<byte> request = new List<byte>();
request.AddRange(BitWorks.GetBytesReversed((short)Topic.Length));
request.AddRange(Encoding.ASCII.GetBytes(Topic));
request.AddRange(BitWorks.GetBytesReversed(Partition));
request.AddRange(BitWorks.GetBytesReversed(Offset));
request.AddRange(BitWorks.GetBytesReversed(MaxSize));
return request.ToArray<byte>();
}
}
}

View File

@@ -0,0 +1,62 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client.Request
{
/// <summary>
/// Constructs a multi-consumer request to send to Kafka.
/// </summary>
public class MultiFetchRequest : AbstractRequest
{
/// <summary>
/// Initializes a new instance of the MultiFetchRequest class.
/// </summary>
/// <param name="requests">Requests to package up and batch.</param>
public MultiFetchRequest(IList<FetchRequest> requests)
{
ConsumerRequests = requests;
}
/// <summary>
/// Gets or sets the consumer requests to be batched into this multi-request.
/// </summary>
public IList<FetchRequest> ConsumerRequests { get; set; }
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public override bool IsValid()
{
return ConsumerRequests != null && ConsumerRequests.Count > 0
&& ConsumerRequests.All(itm => itm.IsValid());
}
/// <summary>
/// Gets the bytes matching the expected Kafka structure.
/// </summary>
/// <returns>The byte array of the request.</returns>
public override byte[] GetBytes()
{
byte[] requestBytes = BitWorks.GetBytesReversed(Convert.ToInt16((int)RequestType.MultiFetch));
byte[] consumerRequestCountBytes = BitWorks.GetBytesReversed(Convert.ToInt16(ConsumerRequests.Count));
List<byte> encodedMessageSet = new List<byte>();
encodedMessageSet.AddRange(requestBytes);
encodedMessageSet.AddRange(consumerRequestCountBytes);
foreach (FetchRequest consumerRequest in ConsumerRequests)
{
encodedMessageSet.AddRange(consumerRequest.GetInternalBytes());
}
encodedMessageSet.InsertRange(0, BitWorks.GetBytesReversed(encodedMessageSet.Count));
return encodedMessageSet.ToArray();
}
}
}
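
A sketch of batching several fetches into one round trip with this class; topics, offsets, and broker details are placeholders, and the i-th message set in the result corresponds to the i-th request in the batch:

    using System;
    using System.Collections.Generic;
    using Kafka.Client;
    using Kafka.Client.Request;

    public class MultiFetchExample
    {
        public static void Main()
        {
            MultiFetchRequest request = new MultiFetchRequest(new List<FetchRequest>
            {
                new FetchRequest("test", 0, 0L),
                new FetchRequest("testa", 0, 0L)
            });

            Consumer consumer = new Consumer("localhost", 9092);
            List<List<Message>> messageSets = consumer.Consume(request);
            Console.WriteLine("{0} message sets returned", messageSets.Count);
        }
    }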

View File

@@ -0,0 +1,71 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client.Request
{
/// <summary>
/// Constructs a request containing multiple producer requests to send to Kafka.
/// </summary>
public class MultiProducerRequest : AbstractRequest
{
/// <summary>
/// Initializes a new instance of the MultiProducerRequest class.
/// </summary>
public MultiProducerRequest()
{
}
/// <summary>
/// Initializes a new instance of the MultiProducerRequest class.
/// </summary>
/// <param name="producerRequests">
/// The list of individual producer requests to send in this request.
/// </param>
public MultiProducerRequest(IList<ProducerRequest> producerRequests)
{
ProducerRequests = producerRequests;
}
/// <summary>
/// Gets or sets the list of producer requests to be sent in batch.
/// </summary>
public IList<ProducerRequest> ProducerRequests { get; set; }
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public override bool IsValid()
{
return ProducerRequests != null && ProducerRequests.Count > 0
&& ProducerRequests.All(itm => itm.IsValid());
}
/// <summary>
/// Gets the bytes matching the expected Kafka structure.
/// </summary>
/// <returns>The byte array of the request.</returns>
public override byte[] GetBytes()
{
byte[] requestBytes = BitWorks.GetBytesReversed(Convert.ToInt16((int)RequestType.MultiProduce));
byte[] producerRequestCountBytes = BitWorks.GetBytesReversed(Convert.ToInt16(ProducerRequests.Count));
List<byte> encodedMessageSet = new List<byte>();
encodedMessageSet.AddRange(requestBytes);
encodedMessageSet.AddRange(producerRequestCountBytes);
foreach (ProducerRequest producerRequest in ProducerRequests)
{
encodedMessageSet.AddRange(producerRequest.GetInternalBytes());
}
encodedMessageSet.InsertRange(0, BitWorks.GetBytesReversed(encodedMessageSet.Count));
return encodedMessageSet.ToArray();
}
}
}

View File

@@ -0,0 +1,90 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client.Request
{
/// <summary>
/// Constructs a request to send to Kafka.
/// </summary>
public class OffsetRequest : AbstractRequest
{
/// <summary>
/// The latest time constant.
/// </summary>
public static readonly long LatestTime = -1L;
/// <summary>
/// The earliest time constant.
/// </summary>
public static readonly long EarliestTime = -2L;
/// <summary>
/// Initializes a new instance of the OffsetRequest class.
/// </summary>
public OffsetRequest()
{
}
/// <summary>
/// Initializes a new instance of the OffsetRequest class.
/// </summary>
/// <param name="topic">The topic to query.</param>
/// <param name="partition">The partition on the topic.</param>
/// <param name="time">The time from which to request offsets.</param>
/// <param name="maxOffsets">The maximum amount of offsets to return.</param>
public OffsetRequest(string topic, int partition, long time, int maxOffsets)
{
Topic = topic;
Partition = partition;
Time = time;
MaxOffsets = maxOffsets;
}
/// <summary>
/// Gets the time.
/// </summary>
public long Time { get; private set; }
/// <summary>
/// Gets the maximum number of offsets to return.
/// </summary>
public int MaxOffsets { get; private set; }
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public override bool IsValid()
{
return !string.IsNullOrWhiteSpace(Topic);
}
/// <summary>
/// Converts the request to an array of bytes that is expected by Kafka.
/// </summary>
/// <returns>An array of bytes that represents the request.</returns>
public override byte[] GetBytes()
{
byte[] requestBytes = BitWorks.GetBytesReversed(Convert.ToInt16((int)RequestType.Offsets));
byte[] topicLengthBytes = BitWorks.GetBytesReversed(Convert.ToInt16(Topic.Length));
byte[] topicBytes = Encoding.UTF8.GetBytes(Topic);
byte[] partitionBytes = BitWorks.GetBytesReversed(Partition);
byte[] timeBytes = BitWorks.GetBytesReversed(Time);
byte[] maxOffsetsBytes = BitWorks.GetBytesReversed(MaxOffsets);
List<byte> encodedMessageSet = new List<byte>();
encodedMessageSet.AddRange(requestBytes);
encodedMessageSet.AddRange(topicLengthBytes);
encodedMessageSet.AddRange(topicBytes);
encodedMessageSet.AddRange(partitionBytes);
encodedMessageSet.AddRange(timeBytes);
encodedMessageSet.AddRange(maxOffsetsBytes);
encodedMessageSet.InsertRange(0, BitWorks.GetBytesReversed(encodedMessageSet.Count));
return encodedMessageSet.ToArray();
}
}
}
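
A sketch of querying offsets with the LatestTime sentinel; broker details and the topic are placeholders:

    using System;
    using Kafka.Client;
    using Kafka.Client.Request;

    public class OffsetExample
    {
        public static void Main()
        {
            // Ask for up to 10 offsets at the head of the "test" log.
            OffsetRequest request = new OffsetRequest("test", 0, OffsetRequest.LatestTime, 10);
            Consumer consumer = new Consumer("localhost", 9092);

            foreach (long offset in consumer.GetOffsetsBefore(request))
            {
                Console.WriteLine(offset); // offsets arrive in descending order
            }
        }
    }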

View File

@@ -0,0 +1,98 @@
using System;
using System.Collections.Generic;
using System.Text;
using Kafka.Client.Util;
namespace Kafka.Client.Request
{
/// <summary>
/// Constructs a request to send to Kafka.
/// </summary>
public class ProducerRequest : AbstractRequest
{
/// <summary>
/// Initializes a new instance of the ProducerRequest class.
/// </summary>
public ProducerRequest()
{
}
/// <summary>
/// Initializes a new instance of the ProducerRequest class.
/// </summary>
/// <param name="topic">The topic to publish to.</param>
/// <param name="partition">The partition to publish to.</param>
/// <param name="messages">The list of messages to send.</param>
public ProducerRequest(string topic, int partition, IList<Message> messages)
{
Topic = topic;
Partition = partition;
Messages = messages;
}
/// <summary>
/// Gets or sets the messages to publish.
/// </summary>
public IList<Message> Messages { get; set; }
/// <summary>
/// Determines if the request has valid settings.
/// </summary>
/// <returns>True if valid and false otherwise.</returns>
public override bool IsValid()
{
return !string.IsNullOrWhiteSpace(Topic) && Messages != null && Messages.Count > 0;
}
/// <summary>
/// Gets the bytes matching the expected Kafka structure.
/// </summary>
/// <returns>The byte array of the request.</returns>
public override byte[] GetBytes()
{
List<byte> encodedMessageSet = new List<byte>();
encodedMessageSet.AddRange(GetInternalBytes());
byte[] requestBytes = BitWorks.GetBytesReversed(Convert.ToInt16((int)RequestType.Produce));
encodedMessageSet.InsertRange(0, requestBytes);
encodedMessageSet.InsertRange(0, BitWorks.GetBytesReversed(encodedMessageSet.Count));
return encodedMessageSet.ToArray();
}
/// <summary>
/// Gets the bytes representing the request which is used when generating a multi-request.
/// </summary>
/// <remarks>
/// The <see cref="GetBytes"/> method is used for sending a single <see cref="RequestType.Produce"/>.
/// It prefixes this byte array with the total request length and the request type. This method
/// is used to supply the <see cref="MultiProducerRequest"/> with the contents for its message.
/// </remarks>
/// <returns>The bytes that represent this <see cref="ProducerRequest"/>.</returns>
internal byte[] GetInternalBytes()
{
List<byte> messagePack = new List<byte>();
foreach (Message message in Messages)
{
byte[] messageBytes = message.GetBytes();
messagePack.AddRange(BitWorks.GetBytesReversed(messageBytes.Length));
messagePack.AddRange(messageBytes);
}
byte[] topicLengthBytes = BitWorks.GetBytesReversed(Convert.ToInt16(Topic.Length));
byte[] topicBytes = Encoding.UTF8.GetBytes(Topic);
byte[] partitionBytes = BitWorks.GetBytesReversed(Partition);
byte[] messagePackLengthBytes = BitWorks.GetBytesReversed(messagePack.Count);
byte[] messagePackBytes = messagePack.ToArray();
List<byte> encodedMessageSet = new List<byte>();
encodedMessageSet.AddRange(topicLengthBytes);
encodedMessageSet.AddRange(topicBytes);
encodedMessageSet.AddRange(partitionBytes);
encodedMessageSet.AddRange(messagePackLengthBytes);
encodedMessageSet.AddRange(messagePackBytes);
return encodedMessageSet.ToArray();
}
}
}

View File

@@ -0,0 +1,36 @@
using System.Net.Sockets;
namespace Kafka.Client
{
/// <summary>
/// The context of a request made to Kafka.
/// </summary>
/// <typeparam name="T">
/// Must be of type <see cref="AbstractRequest"/> and represents the type of request
/// sent to Kafka.
/// </typeparam>
public class RequestContext<T> where T : AbstractRequest
{
/// <summary>
/// Initializes a new instance of the RequestContext class.
/// </summary>
/// <param name="networkStream">The network stream that sent the message.</param>
/// <param name="request">The request sent over the stream.</param>
public RequestContext(NetworkStream networkStream, T request)
{
NetworkStream = networkStream;
Request = request;
}
/// <summary>
/// Gets the <see cref="NetworkStream"/> instance of the request.
/// </summary>
public NetworkStream NetworkStream { get; private set; }
/// <summary>
/// Gets the <see cref="FetchRequest"/> or <see cref="ProducerRequest"/> object
/// associated with the <see cref="RequestContext"/>.
/// </summary>
public T Request { get; private set; }
}
}

View File

@@ -0,0 +1,36 @@
namespace Kafka.Client
{
/// <summary>
/// Request types for Kafka.
/// </summary>
/// <remarks>
/// Many of these are not in play yet.
/// </remarks>
public enum RequestType
{
/// <summary>
/// Produce a message.
/// </summary>
Produce = 0,
/// <summary>
/// Fetch a message.
/// </summary>
Fetch = 1,
/// <summary>
/// Multi-fetch messages.
/// </summary>
MultiFetch = 2,
/// <summary>
/// Multi-produce messages.
/// </summary>
MultiProduce = 3,
/// <summary>
/// Gets offsets.
/// </summary>
Offsets = 4
}
}

View File

@@ -0,0 +1,69 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Kafka.Client.Util
{
/// <summary>
/// Utility class for managing bits and bytes.
/// </summary>
public class BitWorks
{
/// <summary>
/// Converts the value to bytes and reverses them.
/// </summary>
/// <param name="value">The value to convert to bytes.</param>
/// <returns>Bytes representing the value.</returns>
public static byte[] GetBytesReversed(short value)
{
return ReverseBytes(BitConverter.GetBytes(value));
}
/// <summary>
/// Converts the value to bytes and reverses them.
/// </summary>
/// <param name="value">The value to convert to bytes.</param>
/// <returns>Bytes representing the value.</returns>
public static byte[] GetBytesReversed(int value)
{
return ReverseBytes(BitConverter.GetBytes(value));
}
/// <summary>
/// Converts the value to bytes and reverses them.
/// </summary>
/// <param name="value">The value to convert to bytes.</param>
/// <returns>Bytes representing the value.</returns>
public static byte[] GetBytesReversed(long value)
{
return ReverseBytes(BitConverter.GetBytes(value));
}
/// <summary>
/// Reverse the position of an array of bytes.
/// </summary>
/// <param name="inArray">
/// The array to reverse in place. A null or empty array is returned unchanged.
/// </param>
/// <returns>The reversed array.</returns>
public static byte[] ReverseBytes(byte[] inArray)
{
if (inArray != null && inArray.Length > 0)
{
int highCtr = inArray.Length - 1;
byte temp;
for (int ctr = 0; ctr < inArray.Length / 2; ctr++)
{
temp = inArray[ctr];
inArray[ctr] = inArray[highCtr];
inArray[highCtr] = temp;
highCtr -= 1;
}
}
return inArray;
}
}
}
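
A sketch showing why the reversal matters: BitConverter emits host-order bytes, so on the little-endian machines this client targets, reversing yields the big-endian (network) order Kafka expects; on a big-endian host the reversal would be counterproductive.

    using System;
    using Kafka.Client.Util;

    public class BitWorksExample
    {
        public static void Main()
        {
            // On a little-endian host BitConverter.GetBytes(1) is 01-00-00-00;
            // reversing produces the big-endian 00-00-00-01 the wire format expects.
            byte[] bigEndian = BitWorks.GetBytesReversed(1);
            Console.WriteLine(BitConverter.ToString(bigEndian)); // 00-00-00-01
        }
    }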

View File

@@ -0,0 +1,115 @@
// <auto-generated />
using System;
using System.Security.Cryptography;
namespace Kafka.Client.Util
{
/// <summary>
/// From http://damieng.com/blog/2006/08/08/calculating_crc32_in_c_and_net
/// </summary>
public class Crc32 : HashAlgorithm
{
public const UInt32 DefaultPolynomial = 0xedb88320;
public const UInt32 DefaultSeed = 0xffffffff;
private UInt32 hash;
private UInt32 seed;
private UInt32[] table;
private static UInt32[] defaultTable;
public Crc32()
{
table = InitializeTable(DefaultPolynomial);
seed = DefaultSeed;
Initialize();
}
public Crc32(UInt32 polynomial, UInt32 seed)
{
table = InitializeTable(polynomial);
this.seed = seed;
Initialize();
}
public override void Initialize()
{
hash = seed;
}
protected override void HashCore(byte[] buffer, int start, int length)
{
hash = CalculateHash(table, hash, buffer, start, length);
}
protected override byte[] HashFinal()
{
byte[] hashBuffer = UInt32ToBigEndianBytes(~hash);
this.HashValue = hashBuffer;
return hashBuffer;
}
public override int HashSize
{
get { return 32; }
}
public static UInt32 Compute(byte[] buffer)
{
return ~CalculateHash(InitializeTable(DefaultPolynomial), DefaultSeed, buffer, 0, buffer.Length);
}
public static UInt32 Compute(UInt32 seed, byte[] buffer)
{
return ~CalculateHash(InitializeTable(DefaultPolynomial), seed, buffer, 0, buffer.Length);
}
public static UInt32 Compute(UInt32 polynomial, UInt32 seed, byte[] buffer)
{
return ~CalculateHash(InitializeTable(polynomial), seed, buffer, 0, buffer.Length);
}
private static UInt32[] InitializeTable(UInt32 polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
return defaultTable;
UInt32[] createTable = new UInt32[256];
for (int i = 0; i < 256; i++)
{
UInt32 entry = (UInt32)i;
for (int j = 0; j < 8; j++)
if ((entry & 1) == 1)
entry = (entry >> 1) ^ polynomial;
else
entry = entry >> 1;
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
defaultTable = createTable;
return createTable;
}
private static UInt32 CalculateHash(UInt32[] table, UInt32 seed, byte[] buffer, int start, int size)
{
UInt32 crc = seed;
for (int i = start; i < start + size; i++)
unchecked
{
crc = (crc >> 8) ^ table[buffer[i] ^ crc & 0xff];
}
return crc;
}
private byte[] UInt32ToBigEndianBytes(UInt32 x)
{
return new byte[] {
(byte)((x >> 24) & 0xff),
(byte)((x >> 16) & 0xff),
(byte)((x >> 8) & 0xff),
(byte)(x & 0xff)
};
}
}
}
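
A usage sketch of both entry points: the static Compute helper returns the CRC as an unsigned integer, while the HashAlgorithm interface returns the same value as four big-endian bytes:

    using System;
    using System.Text;
    using Kafka.Client.Util;

    public class Crc32Example
    {
        public static void Main()
        {
            byte[] data = Encoding.UTF8.GetBytes("kafka");

            // Static helper: CRC32 as a UInt32.
            Console.WriteLine(Crc32.Compute(data).ToString("x8"));

            // HashAlgorithm interface: the same CRC32 as big-endian bytes.
            using (Crc32 hasher = new Crc32())
            {
                Console.WriteLine(BitConverter.ToString(hasher.ComputeHash(data)));
            }
        }
    }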

View File

@@ -0,0 +1,38 @@

Microsoft Visual Studio Solution File, Format Version 11.00
# Visual Studio 2010
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka.Client", "Kafka.Client\Kafka.Client.csproj", "{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka.Client.Tests", "Tests\Kafka.Client.Tests\Kafka.Client.Tests.csproj", "{9BA1A0BF-B207-4A11-8883-5F64B113C07D}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{06FD20F1-CE06-430E-AF6E-2EBECE6E47B3}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka.Client.IntegrationTests", "Tests\Kafka.Client.IntegrationTests\Kafka.Client.IntegrationTests.csproj", "{AF29C330-49BD-4648-B692-882E922C435B}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}.Release|Any CPU.Build.0 = Release|Any CPU
{9BA1A0BF-B207-4A11-8883-5F64B113C07D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9BA1A0BF-B207-4A11-8883-5F64B113C07D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9BA1A0BF-B207-4A11-8883-5F64B113C07D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9BA1A0BF-B207-4A11-8883-5F64B113C07D}.Release|Any CPU.Build.0 = Release|Any CPU
{AF29C330-49BD-4648-B692-882E922C435B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AF29C330-49BD-4648-B692-882E922C435B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AF29C330-49BD-4648-B692-882E922C435B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AF29C330-49BD-4648-B692-882E922C435B}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{9BA1A0BF-B207-4A11-8883-5F64B113C07D} = {06FD20F1-CE06-430E-AF6E-2EBECE6E47B3}
{AF29C330-49BD-4648-B692-882E922C435B} = {06FD20F1-CE06-430E-AF6E-2EBECE6E47B3}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,64 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{AF29C330-49BD-4648-B692-882E922C435B}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Kafka.Client.IntegrationTests</RootNamespace>
<AssemblyName>Kafka.Client.IntegrationTests</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="nunit.framework, Version=2.5.9.10348, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\..\..\..\lib\nunit\2.5.9\nunit.framework.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="KafkaIntegrationTest.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Kafka.Client\Kafka.Client.csproj">
<Project>{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}</Project>
<Name>Kafka.Client</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

View File

@@ -0,0 +1,181 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using Kafka.Client.Request;
using NUnit.Framework;
namespace Kafka.Client.Tests
{
/// <summary>
/// Contains tests that go all the way to Kafka and back.
/// </summary>
[TestFixture]
[Ignore("Requires a Kafka server running to execute")]
public class KafkaIntegrationTest
{
/// <summary>
/// Kafka server to test against.
/// </summary>
private static readonly string KafkaServer = "192.168.50.203";
/// <summary>
/// Port of the Kafka server to test against.
/// </summary>
private static readonly int KafkaPort = 9092;
/// <summary>
/// Sends a pair of messages to Kafka.
/// </summary>
[Test]
public void ProducerSendsMessage()
{
string payload1 = "kafka 1.";
byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
Message msg1 = new Message(payloadData1);
string payload2 = "kafka 2.";
byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
Message msg2 = new Message(payloadData2);
Producer producer = new Producer(KafkaServer, KafkaPort);
producer.Send("test", 0, new List<Message> { msg1, msg2 });
}
/// <summary>
/// Asynchronously sends a batch of messages to Kafka.
/// </summary>
[Test]
public void ProducerSendsMessageAsynchronously()
{
bool waiting = true;
List<Message> messages = GenerateRandomMessages(50);
Producer producer = new Producer(KafkaServer, KafkaPort);
producer.SendAsync(
"test",
0,
messages,
(requestContext) => { waiting = false; });
while (waiting)
{
Console.WriteLine("Keep going...");
Thread.Sleep(10);
}
}
/// <summary>
/// Send a multi-produce request to Kafka.
/// </summary>
[Test]
public void ProducerSendMultiRequest()
{
List<ProducerRequest> requests = new List<ProducerRequest>
{
new ProducerRequest("test", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("1: " + DateTime.UtcNow)) }),
new ProducerRequest("test", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("2: " + DateTime.UtcNow)) }),
new ProducerRequest("testa", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("3: " + DateTime.UtcNow)) }),
new ProducerRequest("testa", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("4: " + DateTime.UtcNow)) })
};
MultiProducerRequest request = new MultiProducerRequest(requests);
Producer producer = new Producer(KafkaServer, KafkaPort);
producer.Send(request);
}
/// <summary>
/// Generates messages for Kafka then gets them back.
/// </summary>
[Test]
public void ConsumerFetchMessage()
{
ProducerSendsMessage();
Consumer consumer = new Consumer(KafkaServer, KafkaPort);
List<Message> messages = consumer.Consume("test", 0, 0);
foreach (Message msg in messages)
{
Console.WriteLine(msg);
}
}
/// <summary>
/// Generates multiple messages for Kafka then gets them back.
/// </summary>
[Test]
public void ConsumerMultiFetchGetsMessage()
{
ProducerSendMultiRequest();
Consumer consumer = new Consumer(KafkaServer, KafkaPort);
MultiFetchRequest request = new MultiFetchRequest(new List<FetchRequest>
{
new FetchRequest("test", 0, 0),
new FetchRequest("test", 0, 0),
new FetchRequest("testa", 0, 0)
});
List<List<Message>> messages = consumer.Consume(request);
for (int ix = 0; ix < messages.Count; ix++)
{
List<Message> messageSet = messages[ix];
Console.WriteLine(string.Format("Request #{0}-->", ix));
foreach (Message msg in messageSet)
{
Console.WriteLine(msg);
}
}
}
/// <summary>
/// Gets offsets from Kafka.
/// </summary>
[Test]
public void ConsumerGetsOffsets()
{
OffsetRequest request = new OffsetRequest("test", 0, DateTime.Now.AddHours(-24).Ticks, 10);
Consumer consumer = new Consumer(KafkaServer, KafkaPort);
IList<long> list = consumer.GetOffsetsBefore(request);
foreach (long l in list)
{
Console.Out.WriteLine(l);
}
}
/// <summary>
/// Generates a random list of messages.
/// </summary>
/// <param name="numberOfMessages">The number of messages to generate.</param>
/// <returns>A list of random messages.</returns>
private static List<Message> GenerateRandomMessages(int numberOfMessages)
{
List<Message> messages = new List<Message>();
for (int ix = 0; ix < numberOfMessages; ix++)
{
messages.Add(new Message(GenerateRandomBytes(10000)));
}
return messages;
}
/// <summary>
/// Generate a random set of bytes.
/// </summary>
/// <param name="length">Length of the byte array.</param>
/// <returns>Random byte array.</returns>
private static byte[] GenerateRandomBytes(int length)
{
byte[] randBytes = new byte[length];
Random randNum = new Random();
randNum.NextBytes(randBytes);
return randBytes;
}
}
}

View File

@@ -0,0 +1,36 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Kafka.Client.IntegrationTests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Kafka.Client.IntegrationTests")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("7b2387b7-6a58-4e8b-ae06-8aadf1a64949")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

View File

@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>8.0.30703</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{9BA1A0BF-B207-4A11-8883-5F64B113C07D}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Kafka.Client.Tests</RootNamespace>
<AssemblyName>Kafka.Client.Tests</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="nunit.framework, Version=2.5.9.10348, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\..\..\..\lib\nunit\2.5.9\nunit.framework.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="MessageTests.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Request\FetchRequestTests.cs" />
<Compile Include="Request\MultiFetchRequestTests.cs" />
<Compile Include="Request\MultiProducerRequestTests.cs" />
<Compile Include="Request\OffsetRequestTests.cs" />
<Compile Include="Request\ProducerRequestTests.cs" />
<Compile Include="Util\BitWorksTests.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Kafka.Client\Kafka.Client.csproj">
<Project>{A92DD03B-EE4F-4A78-9FB2-279B6348C7D2}</Project>
<Name>Kafka.Client</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

View File

@@ -0,0 +1,68 @@
using System;
using System.Linq;
using System.Text;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Tests
{
/// <summary>
/// Tests for the <see cref="Message"/> class.
/// </summary>
[TestFixture]
public class MessageTests
{
/// <summary>
/// Demonstrates a properly parsed message.
/// </summary>
[Test]
public void ParseFromValid()
{
Crc32 crc32 = new Crc32();
string payload = "kafka";
byte magic = 0;
byte[] payloadData = Encoding.UTF8.GetBytes(payload);
// Kafka writes the size prefix in big-endian (network) order.
byte[] payloadSize = BitWorks.GetBytesReversed(payloadData.Length);
byte[] checksum = crc32.ComputeHash(payloadData);
byte[] messageData = new byte[payloadData.Length + 1 + payloadSize.Length + checksum.Length];
Buffer.BlockCopy(payloadSize, 0, messageData, 0, payloadSize.Length);
messageData[4] = magic;
Buffer.BlockCopy(checksum, 0, messageData, payloadSize.Length + 1, checksum.Length);
Buffer.BlockCopy(payloadData, 0, messageData, payloadSize.Length + 1 + checksum.Length, payloadData.Length);
Message message = Message.ParseFrom(messageData);
Assert.IsNotNull(message);
Assert.AreEqual(magic, message.Magic);
Assert.IsTrue(payloadData.SequenceEqual(message.Payload));
Assert.IsTrue(checksum.SequenceEqual(message.Checksum));
}
/// <summary>
/// Ensures that the bytes returned from the message are in the sequence Kafka expects.
/// </summary>
[Test]
public void GetBytesValidSequence()
{
Message message = new Message(new byte[10], (byte)245);
byte[] bytes = message.GetBytes();
Assert.IsNotNull(bytes);
// len(payload) + 1 magic byte + 4 checksum bytes
Assert.AreEqual(15, bytes.Length);
// first byte = the magic number
Assert.AreEqual((byte)245, bytes[0]);
// next 4 bytes = the checksum
Assert.IsTrue(message.Checksum.SequenceEqual(bytes.Skip(1).Take(4).ToArray<byte>()));
// remaining bytes = the payload
Assert.AreEqual(10, bytes.Skip(5).ToArray<byte>().Length);
}
}
}
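
For reference, the layout ParseFromValid assembles by hand is a 4-byte payload-size prefix, a 1-byte magic value, a 4-byte CRC32 checksum, and then the payload. A minimal standalone sketch of that framing (MessageFrameSketch is a hypothetical helper; the checksum is taken as an argument rather than computed with the client's Crc32 class):

using System;

public static class MessageFrameSketch
{
    // Builds [4-byte payload size][1-byte magic][4-byte CRC32][payload],
    // mirroring the buffer assembled by hand in ParseFromValid above.
    public static byte[] Frame(byte magic, byte[] checksum, byte[] payload)
    {
        byte[] buffer = new byte[4 + 1 + checksum.Length + payload.Length];
        Buffer.BlockCopy(BitConverter.GetBytes(payload.Length), 0, buffer, 0, 4);
        buffer[4] = magic;
        Buffer.BlockCopy(checksum, 0, buffer, 5, checksum.Length);
        Buffer.BlockCopy(payload, 0, buffer, 5 + checksum.Length, payload.Length);
        return buffer;
    }
}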

36
Properties/AssemblyInfo.cs Normal file

@ -0,0 +1,36 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Kafka.Client.Tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Kafka.Client.Tests")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("bf361ee0-5cbb-4fd6-bded-67bedcb603b8")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]

86
Request/FetchRequestTests.cs Normal file

@ -0,0 +1,86 @@
using System;
using System.Linq;
using System.Text;
using Kafka.Client.Request;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Request.Tests
{
/// <summary>
/// Tests for the <see cref="FetchRequest"/> class.
/// </summary>
[TestFixture]
public class FetchRequestTests
{
/// <summary>
/// Tests a valid request.
/// </summary>
[Test]
public void IsValidTrue()
{
FetchRequest request = new FetchRequest("topic", 1, 10L, 100);
Assert.IsTrue(request.IsValid());
}
/// <summary>
/// Tests an invalid request with an empty topic.
/// </summary>
[Test]
public void IsValidNoTopic()
{
FetchRequest request = new FetchRequest(" ", 1, 10L, 100);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Tests an invalid request with a null topic.
/// </summary>
[Test]
public void IsValidNullTopic()
{
FetchRequest request = new FetchRequest(null, 1, 10L, 100);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Tests to ensure that the request follows the expected structure.
/// </summary>
[Test]
public void GetBytesValidStructure()
{
string topicName = "topic";
FetchRequest request = new FetchRequest(topicName, 1, 10L, 100);
// REQUEST TYPE ID + TOPIC LENGTH + TOPIC + PARTITION + OFFSET + MAX SIZE
int requestSize = 2 + 2 + topicName.Length + 4 + 8 + 4;
byte[] bytes = request.GetBytes();
Assert.IsNotNull(bytes);
// add 4 bytes for the length of the message at the beginning
Assert.AreEqual(requestSize + 4, bytes.Length);
// first 4 bytes = the message length
Assert.AreEqual(25, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Take(4).ToArray<byte>()), 0));
// next 2 bytes = the request type
Assert.AreEqual((short)RequestType.Fetch, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(4).Take(2).ToArray<byte>()), 0));
// next 2 bytes = the topic length
Assert.AreEqual((short)topicName.Length, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(6).Take(2).ToArray<byte>()), 0));
// next few bytes = the topic
Assert.AreEqual(topicName, Encoding.ASCII.GetString(bytes.Skip(8).Take(topicName.Length).ToArray<byte>()));
// next 4 bytes = the partition
Assert.AreEqual(1, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(8 + topicName.Length).Take(4).ToArray<byte>()), 0));
// next 8 bytes = the offset
Assert.AreEqual(10L, BitConverter.ToInt64(BitWorks.ReverseBytes(bytes.Skip(12 + topicName.Length).Take(8).ToArray<byte>()), 0));
// last 4 bytes = the max size
Assert.AreEqual(100, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(20 + topicName.Length).Take(4).ToArray<byte>()), 0));
}
}
}
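
Every multi-byte field in this wire format is big-endian, which is why the assertions above reverse bytes before handing them to BitConverter on a little-endian host. A hypothetical encoder producing the same 29-byte fetch request checked by GetBytesValidStructure (the request-type id 1 for Fetch matches the wire bytes in the Go tests later in this commit):

using System;
using System.IO;
using System.Text;

public static class FetchRequestEncoderSketch
{
    // [4B size][2B request type][2B topic length][topic][4B partition][8B offset][4B max size]
    public static byte[] Encode(string topic, int partition, long offset, int maxSize)
    {
        byte[] topicBytes = Encoding.ASCII.GetBytes(topic);
        int size = 2 + 2 + topicBytes.Length + 4 + 8 + 4;
        using (MemoryStream stream = new MemoryStream())
        {
            WriteBigEndian(stream, BitConverter.GetBytes(size));
            WriteBigEndian(stream, BitConverter.GetBytes((short)1)); // RequestType.Fetch
            WriteBigEndian(stream, BitConverter.GetBytes((short)topicBytes.Length));
            stream.Write(topicBytes, 0, topicBytes.Length);
            WriteBigEndian(stream, BitConverter.GetBytes(partition));
            WriteBigEndian(stream, BitConverter.GetBytes(offset));
            WriteBigEndian(stream, BitConverter.GetBytes(maxSize));
            return stream.ToArray();
        }
    }

    // BitConverter emits host (little-endian) order on x86; reversing yields network order.
    private static void WriteBigEndian(MemoryStream stream, byte[] bytes)
    {
        Array.Reverse(bytes);
        stream.Write(bytes, 0, bytes.Length);
    }
}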

86
Request/MultiFetchRequestTests.cs Normal file

@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Kafka.Client.Request;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Request.Tests
{
/// <summary>
/// Tests for the <see cref="MultiFetchRequest"/> class.
/// </summary>
[TestFixture]
public class MultiFetchRequestTests
{
/// <summary>
/// Tests a valid multi-fetch request.
/// </summary>
[Test]
public void IsValidTrue()
{
List<FetchRequest> requests = new List<FetchRequest>
{
new FetchRequest("topic a", 0, 0),
new FetchRequest("topic a", 0, 0),
new FetchRequest("topic b", 0, 0),
new FetchRequest("topic c", 0, 0)
};
MultiFetchRequest multiRequest = new MultiFetchRequest(requests);
Assert.IsTrue(multiRequest.IsValid());
}
/// <summary>
/// Tests for an invalid multi-request with no requests provided.
/// </summary>
[Test]
public void IsValidNoRequests()
{
MultiFetchRequest multiRequest = new MultiFetchRequest(new List<FetchRequest>());
Assert.IsFalse(multiRequest.IsValid());
}
/// <summary>
/// Tests for an invalid multi-request with null requests provided.
/// </summary>
[Test]
public void IsValidNullRequests()
{
MultiFetchRequest multiRequest = new MultiFetchRequest(null);
Assert.IsFalse(multiRequest.IsValid());
}
/// <summary>
/// Test to ensure a valid format in the returned byte array as expected by Kafka.
/// </summary>
[Test]
public void GetBytesValidFormat()
{
List<FetchRequest> requests = new List<FetchRequest>
{
new FetchRequest("topic a", 0, 0),
new FetchRequest("topic a", 0, 0),
new FetchRequest("topic b", 0, 0),
new FetchRequest("topic c", 0, 0)
};
MultiFetchRequest request = new MultiFetchRequest(requests);
// format = len(request) + requesttype + requestcount + requestpackage
// total byte count = 4 + (2 + 2 + 100)
byte[] bytes = request.GetBytes();
Assert.IsNotNull(bytes);
Assert.AreEqual(108, bytes.Length);
// first 4 bytes = the length of the request
Assert.AreEqual(104, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Take(4).ToArray<byte>()), 0));
// next 2 bytes = the RequestType, which in this case should be MultiFetch
Assert.AreEqual((short)RequestType.MultiFetch, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(4).Take(2).ToArray<byte>()), 0));
// next 2 bytes = the number of messages
Assert.AreEqual((short)4, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(6).Take(2).ToArray<byte>()), 0));
}
}
}
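
The multi-fetch envelope asserted above is just a 2-byte request type and a 2-byte request count in front of the concatenated per-topic fetch requests. The arithmetic behind the magic numbers 104 and 108, spelled out (sizes taken from the test's own comments):

using System;

public static class MultiFetchSizeCheck
{
    public static void Main()
    {
        // topic length (2) + "topic a" (7) + partition (4) + offset (8) + max size (4)
        int perRequest = 2 + "topic a".Length + 4 + 8 + 4;  // 25 bytes
        int envelope = 2 + 2 + 4 * perRequest;              // type + count + 4 packaged fetches
        Console.WriteLine(envelope);                        // 104; the 4-byte size prefix makes 108
    }
}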

86
Request/MultiProducerRequestTests.cs Normal file

@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Kafka.Client.Request;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Request.Tests
{
/// <summary>
/// Tests for the <see cref="MultiProducerRequest"/> class.
/// </summary>
[TestFixture]
public class MultiProducerRequestTests
{
/// <summary>
/// Tests a valid multi-producer request.
/// </summary>
[Test]
public void IsValidTrue()
{
List<ProducerRequest> requests = new List<ProducerRequest>
{
new ProducerRequest("topic a", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic a", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic b", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic c", 0, new List<Message> { new Message(new byte[10]) })
};
MultiProducerRequest multiRequest = new MultiProducerRequest(requests);
Assert.IsTrue(multiRequest.IsValid());
}
/// <summary>
/// Tests for an invalid multi-request with no requests provided.
/// </summary>
[Test]
public void IsValidNoRequests()
{
MultiProducerRequest multiRequest = new MultiProducerRequest(new List<ProducerRequest>());
Assert.IsFalse(multiRequest.IsValid());
}
/// <summary>
/// Tests for an invalid multi-request with null requests provided.
/// </summary>
[Test]
public void IsValidNullRequests()
{
MultiProducerRequest multiRequest = new MultiProducerRequest(null);
Assert.IsFalse(multiRequest.IsValid());
}
/// <summary>
/// Test to ensure a valid format in the returned byte array as expected by Kafka.
/// </summary>
[Test]
public void GetBytesValidFormat()
{
List<ProducerRequest> requests = new List<ProducerRequest>
{
new ProducerRequest("topic a", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic a", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic b", 0, new List<Message> { new Message(new byte[10]) }),
new ProducerRequest("topic c", 0, new List<Message> { new Message(new byte[10]) })
};
MultiProducerRequest request = new MultiProducerRequest(requests);
// format = len(request) + requesttype + requestcount + requestpackage
// total byte count = 4 + (2 + 2 + 144)
byte[] bytes = request.GetBytes();
Assert.IsNotNull(bytes);
Assert.AreEqual(152, bytes.Length);
// first 4 bytes = the length of the request
Assert.AreEqual(148, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Take(4).ToArray<byte>()), 0));
// next 2 bytes = the RequestType, which in this case should be MultiProduce
Assert.AreEqual((short)RequestType.MultiProduce, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(4).Take(2).ToArray<byte>()), 0));
// next 2 bytes = the number of messages
Assert.AreEqual((short)4, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(6).Take(2).ToArray<byte>()), 0));
}
}
}

83
Request/OffsetRequestTests.cs Normal file

@ -0,0 +1,83 @@
using System;
using System.Linq;
using System.Text;
using Kafka.Client.Request;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Request.Tests
{
/// <summary>
/// Tests the <see cref="OffsetRequest"/> class.
/// </summary>
[TestFixture]
public class OffsetRequestTests
{
/// <summary>
/// Tests a valid request.
/// </summary>
[Test]
public void IsValidTrue()
{
FetchRequest request = new FetchRequest("topic", 1, 10L, 100);
Assert.IsTrue(request.IsValid());
}
/// <summary>
/// Tests an invalid request with an empty topic.
/// </summary>
[Test]
public void IsValidNoTopic()
{
FetchRequest request = new FetchRequest(" ", 1, 10L, 100);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Tests an invalid request with a null topic.
/// </summary>
[Test]
public void IsValidNullTopic()
{
OffsetRequest request = new OffsetRequest(null, 1, 10L, 100);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Validates the list of bytes meet Kafka expectations.
/// </summary>
[Test]
public void GetBytesValid()
{
string topicName = "topic";
OffsetRequest request = new OffsetRequest(topicName, 0, OffsetRequest.LatestTime, 10);
// format = len(request) + requesttype + len(topic) + topic + partition + time + max
// total byte count = 4 + (2 + 2 + 5 + 4 + 8 + 4)
byte[] bytes = request.GetBytes();
Assert.IsNotNull(bytes);
Assert.AreEqual(29, bytes.Length);
// first 4 bytes = the length of the request
Assert.AreEqual(25, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Take(4).ToArray<byte>()), 0));
// next 2 bytes = the RequestType, which in this case should be Offsets
Assert.AreEqual((short)RequestType.Offsets, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(4).Take(2).ToArray<byte>()), 0));
// next 2 bytes = the length of the topic
Assert.AreEqual((short)5, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(6).Take(2).ToArray<byte>()), 0));
// next 5 bytes = the topic
Assert.AreEqual(topicName, Encoding.ASCII.GetString(bytes.Skip(8).Take(5).ToArray<byte>()));
// next 4 bytes = the partition
Assert.AreEqual(0, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(13).Take(4).ToArray<byte>()), 0));
// next 8 bytes = time
Assert.AreEqual(OffsetRequest.LatestTime, BitConverter.ToInt64(BitWorks.ReverseBytes(bytes.Skip(17).Take(8).ToArray<byte>()), 0));
// next 4 bytes = max offsets
Assert.AreEqual(10, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(25).Take(4).ToArray<byte>()), 0));
}
}
}

86
Request/ProducerRequestTests.cs Normal file

@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Kafka.Client.Request;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Request.Tests
{
/// <summary>
/// Tests for the <see cref="ProducerRequest"/> class.
/// </summary>
[TestFixture]
public class ProducerRequestTests
{
/// <summary>
/// Tests a valid producer request.
/// </summary>
[Test]
public void IsValidTrue()
{
ProducerRequest request = new ProducerRequest(
"topic", 0, new List<Message> { new Message(new byte[10]) });
Assert.IsTrue(request.IsValid());
}
/// <summary>
/// Tests an invalid producer request with no topic.
/// </summary>
[Test]
public void IsValidFalseNoTopic()
{
ProducerRequest request = new ProducerRequest(null, 0, null);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Tests an invalid producer request with no messages to send.
/// </summary>
[Test]
public void IsValidFalseNoMessages()
{
ProducerRequest request = new ProducerRequest("topic", 0, null);
Assert.IsFalse(request.IsValid());
}
/// <summary>
/// Test to ensure a valid format in the returned byte array as expected by Kafka.
/// </summary>
[Test]
public void GetBytesValidFormat()
{
string topicName = "topic";
ProducerRequest request = new ProducerRequest(
topicName, 0, new List<Message> { new Message(new byte[10]) });
// format = len(request) + requesttype + len(topic) + topic + partition + len(messagepack) + message
// total byte count = 4 + (2 + 2 + 5 + 4 + 4 + 19)
byte[] bytes = request.GetBytes();
Assert.IsNotNull(bytes);
Assert.AreEqual(40, bytes.Length);
// first 4 bytes = the length of the request
Assert.AreEqual(36, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Take(4).ToArray<byte>()), 0));
// next 2 bytes = the RequestType, which in this case should be Produce
Assert.AreEqual((short)RequestType.Produce, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(4).Take(2).ToArray<byte>()), 0));
// next 2 bytes = the length of the topic
Assert.AreEqual((short)5, BitConverter.ToInt16(BitWorks.ReverseBytes(bytes.Skip(6).Take(2).ToArray<byte>()), 0));
// next 5 bytes = the topic
Assert.AreEqual(topicName, Encoding.ASCII.GetString(bytes.Skip(8).Take(5).ToArray<byte>()));
// next 4 bytes = the partition
Assert.AreEqual(0, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(13).Take(4).ToArray<byte>()), 0));
// next 4 bytes = the length of the individual messages in the pack
Assert.AreEqual(19, BitConverter.ToInt32(BitWorks.ReverseBytes(bytes.Skip(17).Take(4).ToArray<byte>()), 0));
// final bytes = the individual messages in the pack
Assert.AreEqual(19, bytes.Skip(21).ToArray<byte>().Length);
}
}
}
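
GetBytesValidFormat also pins down how the message set is framed inside a produce request: after the common header comes a 4-byte message-set length, then each message as a 4-byte size prefix followed by the 15 bytes (magic + checksum + payload) that MessageTests.GetBytesValidSequence measured, hence 19 bytes for one 10-byte payload. A quick sanity check of the totals:

using System;

public static class ProduceSizeCheck
{
    public static void Main()
    {
        int message = 4 + 1 + 4 + 10;             // size prefix + magic + checksum + payload = 19
        int request = 2 + 2 + "topic".Length + 4  // request type + topic length + topic + partition
                    + 4 + message;                // message-set length + the single message
        Console.WriteLine(request);               // 36; with the 4-byte request size prefix, 40
    }
}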

104
Util/BitWorksTests.cs Normal file

@ -0,0 +1,104 @@
using System;
using Kafka.Client.Util;
using NUnit.Framework;
namespace Kafka.Client.Tests.Util
{
/// <summary>
/// Tests for <see cref="BitWorks"/> utility class.
/// </summary>
[TestFixture]
public class BitWorksTests
{
/// <summary>
/// Ensures bytes are returned reversed.
/// </summary>
[Test]
public void GetBytesReversedShortValid()
{
short val = (short)100;
byte[] normal = BitConverter.GetBytes(val);
byte[] reversed = BitWorks.GetBytesReversed(val);
TestReversedArray(normal, reversed);
}
/// <summary>
/// Ensures bytes are returned reversed.
/// </summary>
[Test]
public void GetBytesReversedIntValid()
{
int val = 100;
byte[] normal = BitConverter.GetBytes(val);
byte[] reversed = BitWorks.GetBytesReversed(val);
TestReversedArray(normal, reversed);
}
/// <summary>
/// Ensures bytes are returned reversed.
/// </summary>
[Test]
public void GetBytesReversedLongValid()
{
long val = 100L;
byte[] normal = BitConverter.GetBytes(val);
byte[] reversed = BitWorks.GetBytesReversed(val);
TestReversedArray(normal, reversed);
}
/// <summary>
/// Null array will reverse to a null.
/// </summary>
[Test]
public void ReverseBytesNullArray()
{
byte[] arr = null;
Assert.IsNull(BitWorks.ReverseBytes(arr));
}
/// <summary>
/// Zero length array will reverse to a zero length array.
/// </summary>
[Test]
public void ReverseBytesZeroLengthArray()
{
byte[] arr = new byte[0];
byte[] reversedArr = BitWorks.ReverseBytes(arr);
Assert.IsNotNull(reversedArr);
Assert.AreEqual(0, reversedArr.Length);
}
/// <summary>
/// Array is reversed.
/// </summary>
[Test]
public void ReverseBytesValid()
{
byte[] arr = BitConverter.GetBytes((short)1);
byte[] original = new byte[2];
arr.CopyTo(original, 0);
byte[] reversedArr = BitWorks.ReverseBytes(arr);
TestReversedArray(original, reversedArr);
}
/// <summary>
/// Performs asserts for two arrays that should be exactly the same, but values
/// in one are in reverse order of the other.
/// </summary>
/// <param name="normal">The "normal" array.</param>
/// <param name="reversed">The array that is in reverse order to the "normal" one.</param>
private static void TestReversedArray(byte[] normal, byte[] reversed)
{
Assert.IsNotNull(reversed);
Assert.AreEqual(normal.Length, reversed.Length);
for (int ix = 0; ix < normal.Length; ix++)
{
Assert.AreEqual(normal[ix], reversed[reversed.Length - 1 - ix]);
}
}
}
}
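
The tests above fix the BitWorks contract without showing its body. A plausible sketch that satisfies them, assuming the committed implementation may differ in detail: reversing BitConverter's output converts host-order (little-endian) integers into the big-endian order the wire format uses.

using System;

public static class BitWorksSketch
{
    // Big-endian encodings of the primitive sizes the protocol uses.
    public static byte[] GetBytesReversed(short value) { return Reverse(BitConverter.GetBytes(value)); }
    public static byte[] GetBytesReversed(int value) { return Reverse(BitConverter.GetBytes(value)); }
    public static byte[] GetBytesReversed(long value) { return Reverse(BitConverter.GetBytes(value)); }

    // Null reverses to null and an empty array to an empty array, per the tests above.
    public static byte[] ReverseBytes(byte[] value)
    {
        if (value == null)
        {
            return null;
        }
        byte[] copy = (byte[])value.Clone();  // leave the caller's array untouched
        Array.Reverse(copy);
        return copy;
    }

    private static byte[] Reverse(byte[] bytes)
    {
        Array.Reverse(bytes);  // safe: BitConverter.GetBytes returns a fresh array
        return bytes;
    }
}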

13
clients/go/.gitignore vendored Normal file

@ -0,0 +1,13 @@
_go_.6
_obj
6.out
_gotest_.6
_test
_testmain.go
_testmain.6
tools/*/_obj
tools/*/_go_.6
tools/consumer/consumer
tools/publisher/publisher
tools/consumer/test.txt
tools/offsets/offsets

208
clients/go/LICENSE Normal file

@ -0,0 +1,208 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright (c) 2011 NeuStar, Inc.
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
NeuStar, the Neustar logo and related names and logos are registered
trademarks, service marks or tradenames of NeuStar, Inc. All other
product names, company names, marks, logos and symbols may be trademarks
of their respective owners.

25
clients/go/Makefile Normal file

@ -0,0 +1,25 @@
include $(GOROOT)/src/Make.inc
TARG=kafka
GOFILES=\
	src/kafka.go\
	src/message.go\
	src/converts.go\
	src/consumer.go\
	src/publisher.go\
	src/timing.go\
	src/request.go\

include $(GOROOT)/src/Make.pkg

tools: force
	make -C tools/consumer clean all
	make -C tools/publisher clean all
	make -C tools/offsets clean all

format:
	gofmt -w -tabwidth=2 -tabindent=false src/*.go tools/consumer/*.go tools/publisher/*.go kafka_test.go

full: format clean install tools

.PHONY: force

83
clients/go/README.md Normal file

@ -0,0 +1,83 @@
# Kafka.go - Publisher & Consumer for Kafka in Go #
Kafka is a distributed publish-subscribe messaging system (http://sna-projects.com/kafka/).
This client is written in the Go language (http://golang.org/). <br/>
## Get up and running ##
Install kafka.go package: <br/>
<code>make install</code>
<br/>
Make the tools (publisher & consumer) <br/>
<code>make tools</code>
<br/>
Start ZooKeeper and the Kafka server. <br/>
For more info on Kafka, see: http://sna-projects.com/kafka/quickstart.php
## Tools ##
Start a consumer:
<pre><code>
./tools/consumer/consumer -topic test -consumeforever
Consuming Messages :
From: localhost:9092, topic: test, partition: 0
----------------------
</code></pre>
Now the consumer will just poll until a message is received.
Publish a message:
<pre><code>
./tools/publisher/publisher -topic test -message "Hello World"
</code></pre>
The consumer should output the message.
## API Usage ##
### Publishing ###
<pre><code>
broker := kafka.NewBrokerPublisher("localhost:9092", "mytesttopic", 0)
broker.Publish(kafka.NewMessage([]byte("testing 1 2 3")))
</code></pre>
### Consuming ###
<pre><code>
broker := kafka.NewBrokerConsumer("localhost:9092", "mytesttopic", 0, 0, 1048576)
broker.Consume(func(msg *kafka.Message) { msg.Print() })
</code></pre>
Or the consumer can use a channel based approach:
<pre><code>
msgChan := make(chan *kafka.Message, 10)
quitChan := make(chan bool)
broker := kafka.NewBrokerConsumer("localhost:9092", "mytesttopic", 0, 0, 1048576)
go broker.ConsumeOnChannel(msgChan, 10, quitChan)
</code></pre>
### Consuming Offsets ###
<pre><code>
broker := kafka.NewBrokerOffsetConsumer("localhost:9092", "mytesttopic", 0)
offsets, err := broker.GetOffsets(-1, 1)
</code></pre>
### Contact ###
jeffreydamick (at) gmail (dot) com
http://twitter.com/jeffreydamick
Big thank you to [NeuStar](http://neustar.biz) for sponsoring this work.

128
clients/go/kafka_test.go Normal file

@ -0,0 +1,128 @@
/*
* Copyright (c) 2011 NeuStar, Inc.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* NeuStar, the Neustar logo and related names and logos are registered
* trademarks, service marks or tradenames of NeuStar, Inc. All other
* product names, company names, marks, logos and symbols may be trademarks
* of their respective owners.
*/
package kafka
import (
"testing"
//"fmt"
"bytes"
"container/list"
)
func TestMessageCreation(t *testing.T) {
payload := []byte("testing")
msg := NewMessage(payload)
if msg.magic != 0 {
t.Errorf("magic incorrect")
t.Fail()
}
// generated by kafka-rb: e8 f3 5a 06
expected := []byte{0xe8, 0xf3, 0x5a, 0x06}
if !bytes.Equal(expected, msg.checksum[:]) {
t.Fail()
}
}
func TestMessageEncoding(t *testing.T) {
payload := []byte("testing")
msg := NewMessage(payload)
// generated by kafka-rb:
expected := []byte{0x00, 0x00, 0x00, 0x0c, 0x00, 0xe8, 0xf3, 0x5a, 0x06, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67}
if !bytes.Equal(expected, msg.Encode()) {
t.Fail()
}
// verify round trip
msgDecoded := Decode(msg.Encode())
if !bytes.Equal(msgDecoded.payload, payload) {
t.Fail()
}
chksum := []byte{0xE8, 0xF3, 0x5A, 0x06}
if !bytes.Equal(msgDecoded.checksum[:], chksum) {
t.Fail()
}
if msgDecoded.magic != 0 {
t.Fail()
}
}
func TestRequestHeaderEncoding(t *testing.T) {
broker := newBroker("localhost:9092", "test", 0)
request := broker.EncodeRequestHeader(REQUEST_PRODUCE)
// generated by kafka-rb:
expected := []byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x74, 0x65, 0x73, 0x74,
0x00, 0x00, 0x00, 0x00}
if !bytes.Equal(expected, request.Bytes()) {
t.Errorf("expected length: %d but got: %d", len(expected), len(request.Bytes()))
t.Errorf("expected: %X\n but got: %X", expected, request)
t.Fail()
}
}
func TestPublishRequestEncoding(t *testing.T) {
payload := []byte("testing")
msg := NewMessage(payload)
messages := list.New()
messages.PushBack(msg)
pubBroker := NewBrokerPublisher("localhost:9092", "test", 0)
request := pubBroker.broker.EncodePublishRequest(messages)
// generated by kafka-rb:
expected := []byte{0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x74, 0x65, 0x73, 0x74,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0c,
0x00, 0xe8, 0xf3, 0x5a, 0x06, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67}
if !bytes.Equal(expected, request) {
t.Errorf("expected length: %d but got: %d", len(expected), len(request))
t.Errorf("expected: %X\n but got: %X", expected, request)
t.Fail()
}
}
func TestConsumeRequestEncoding(t *testing.T) {
pubBroker := NewBrokerPublisher("localhost:9092", "test", 0)
request := pubBroker.broker.EncodeConsumeRequest(0, 1048576)
// generated by kafka-rb, encode_request_size + encode_request
expected := []byte{0x00, 0x00, 0x00, 0x18, 0x00, 0x01, 0x00, 0x04, 0x74,
0x65, 0x73, 0x74, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00}
if !bytes.Equal(expected, request) {
t.Errorf("expected length: %d but got: %d", len(expected), len(request))
t.Errorf("expected: %X\n but got: %X", expected, request)
t.Fail()
}
}

184
clients/go/src/consumer.go Normal file

@ -0,0 +1,184 @@
/*
* Copyright (c) 2011 NeuStar, Inc.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* NeuStar, the Neustar logo and related names and logos are registered
* trademarks, service marks or tradenames of NeuStar, Inc. All other
* product names, company names, marks, logos and symbols may be trademarks
* of their respective owners.
*/
package kafka
import (
"log"
"os"
"net"
"time"
"encoding/binary"
)
type BrokerConsumer struct {
broker *Broker
offset uint64
maxSize uint32
}
// Create a new broker consumer
// hostname - host and optionally port, delimited by ':'
// topic to consume
// partition to consume from
// offset to start consuming from
// maxSize (in bytes) of the message to consume (this should be at least as big as the biggest message to be published)
func NewBrokerConsumer(hostname string, topic string, partition int, offset uint64, maxSize uint32) *BrokerConsumer {
return &BrokerConsumer{broker: newBroker(hostname, topic, partition),
offset: offset,
maxSize: maxSize}
}
// Simplified consumer, used for offset queries (see GetOffsets), that defaults the offset and maxSize to 0.
// hostname - host and optionally port, delimited by ':'
// topic to consume
// partition to consume from
func NewBrokerOffsetConsumer(hostname string, topic string, partition int) *BrokerConsumer {
return &BrokerConsumer{broker: newBroker(hostname, topic, partition),
offset: 0,
maxSize: 0}
}
func (consumer *BrokerConsumer) ConsumeOnChannel(msgChan chan *Message, pollTimeoutMs int64, quit chan bool) (int, os.Error) {
conn, err := consumer.broker.connect()
if err != nil {
return -1, err
}
num := 0
done := make(chan bool, 1)
go func() {
for {
_, err := consumer.consumeWithConn(conn, func(msg *Message) {
msgChan <- msg
num += 1
})
if err != nil {
if err != os.EOF {
log.Println("Fatal Error: ", err)
}
break
}
time.Sleep(pollTimeoutMs * 1000000)
}
done <- true
}()
// wait to be told to stop..
<-quit
conn.Close()
close(msgChan)
<-done
return num, err
}
type MessageHandlerFunc func(msg *Message)
func (consumer *BrokerConsumer) Consume(handlerFunc MessageHandlerFunc) (int, os.Error) {
conn, err := consumer.broker.connect()
if err != nil {
return -1, err
}
defer conn.Close()
num, err := consumer.consumeWithConn(conn, handlerFunc)
if err != nil {
log.Println("Fatal Error: ", err)
}
return num, err
}
func (consumer *BrokerConsumer) consumeWithConn(conn *net.TCPConn, handlerFunc MessageHandlerFunc) (int, os.Error) {
_, err := conn.Write(consumer.broker.EncodeConsumeRequest(consumer.offset, consumer.maxSize))
if err != nil {
return -1, err
}
length, payload, err := consumer.broker.readResponse(conn)
if err != nil {
return -1, err
}
num := 0
if length > 2 {
// parse out the messages
var currentOffset uint64 = 0
for currentOffset <= uint64(length-4) {
msg := Decode(payload[currentOffset:])
if msg == nil {
return num, os.NewError("Error Decoding Message")
}
msg.offset = consumer.offset + currentOffset
currentOffset += uint64(4 + msg.totalLength)
handlerFunc(msg)
num += 1
}
// update the broker's offset for next consumption
consumer.offset += currentOffset
}
return num, err
}
// Get a list of valid offsets (up to maxNumOffsets) before the given time, where time
// is in milliseconds: -1 for the latest offset available, -2 for the smallest offset available.
// The result is a list of offsets, in descending order.
func (consumer *BrokerConsumer) GetOffsets(time int64, maxNumOffsets uint32) ([]uint64, os.Error) {
offsets := make([]uint64, 0)
conn, err := consumer.broker.connect()
if err != nil {
return offsets, err
}
defer conn.Close()
_, err = conn.Write(consumer.broker.EncodeOffsetRequest(time, maxNumOffsets))
if err != nil {
return offsets, err
}
length, payload, err := consumer.broker.readResponse(conn)
if err != nil {
return offsets, err
}
if length > 4 {
// get the number of offsets
numOffsets := binary.BigEndian.Uint32(payload[0:])
var currentOffset uint64 = 4
for currentOffset < uint64(length-4) && uint32(len(offsets)) < numOffsets {
offset := binary.BigEndian.Uint64(payload[currentOffset:])
offsets = append(offsets, offset)
currentOffset += 8 // offset size
}
}
return offsets, err
}

Some files were not shown because too many files have changed in this diff Show More