Compare commits
460 Commits
main...feature/sp
| Author | SHA1 | Date |
|---|---|---|
| | 252b108227 | |
| | fd103496d3 | |
| | 02a9f55072 | |
| | 478b49400b | |
| | c3591979cd | |
| | 3d6b4c42ff | |
| | 47094288f4 | |
| | f08f224995 | |
| | b6cbdb7b00 | |
| | 0e843b8c93 | |
| | 945cfb383c | |
| | 9bdf046914 | |
| | 32c8b9e7b6 | |
| | ad5fb80ad4 | |
| | db5f5ce875 | |
| | b44f39cb46 | |
| | 4974ced37c | |
| | 711cee7043 | |
| | 451566e890 | |
| | 42c42c24e6 | |
| | fb9fcdde33 | |
| | f8170951b8 | |
| | 17d3e5f6ab | |
| | dd708215ff | |
| | 7e67790649 | |
| | 0ae65d872d | |
| | 1ef8405237 | |
| | b88e07422d | |
| | ea4d785f49 | |
| | f28e242143 | |
| | 3acc1f83eb | |
| | d62b97492a | |
| | ee31e100b7 | |
| | 33aa6e5416 | |
| | 152d74bd11 | |
| | 55c409e9be | |
| | 7c47716be0 | |
| | 1299fb82c5 | |
| | 40972e2cc0 | |
| | 7d05bf7c00 | |
| | f7d223cc76 | |
| | 0e8b088691 | |
| | b474bbf8d2 | |
| | 2d88b922f7 | |
| | 7e97c2740f | |
| | d33e3dbbc0 | |
| | 3877544e9c | |
| | e06a137605 | |
| | 3c6ed43b5d | |
| | bb2218cf26 | |
| | 1ab2ecfb5f | |
| | c604211cde | |
| | b9ff2e4f97 | |
| | 3ce475fd7f | |
| | b14df25205 | |
| | 82e86ab49f | |
| | 520e952e03 | |
| | 4659e20239 | |
| | 273753d9ec | |
| | e9fd57f205 | |
| | 2ad29cb5ef | |
| | 69cbbe4ce0 | |
| | 3d2dae191f | |
| | 26faa487be | |
| | 1dc38d289f | |
| | 7a18d9f393 | |
| | dc032bb446 | |
| | 91cc7b37c2 | |
| | bf4eca7bc3 | |
| | af3cf27f84 | |
| | fcde520734 | |
| | 2751e22e5d | |
| | 063a183f65 | |
| | 0c435339ad | |
| | 1b04239fb8 | |
| | fb8aef6677 | |
| | 583d6a0f98 | |
| | 82b5fc78da | |
| | ed292fb31e | |
| | 6a9b36be2d | |
| | cb677cd154 | |
| | 6666170d11 | |
| | 48ab2cf7c7 | |
| | 6892e90474 | |
| | b4f62449c8 | |
| | cdb485b8c9 | |
| | 941f5c444e | |
| | a19960bb20 | |
| | 2b313b37ea | |
| | 60d8e59d1e | |
| | bb635078d9 | |
| | bd32b6b765 | |
| | 2a250d644a | |
| | 91a9168e99 | |
| | e10409f1f9 | |
| | 7baebfa4d5 | |
| | 4f6519d3db | |
| | 5adaf353be | |
| | 6bca87ea9c | |
| | 76235d76e8 | |
| | 078b8c3d7d | |
| | 4285feffc4 | |
| | d2d5ee9422 | |
| | 4aff134466 | |
| | 6f4056bb17 | |
| | a9d81cba61 | |
| | 99180f052e | |
| | cbaf0741b9 | |
| | 1780adae21 | |
| | 1f1caccdac | |
| | 0f7b3583cb | |
| | 18a39c1938 | |
| | 659a31d957 | |
| | 5492fb38f3 | |
| | 62e8e456b0 | |
| | aa67bb78fa | |
| | 5109c90a8a | |
| | 4139ea4b29 | |
| | 0ec42d6333 | |
| | b534857b3a | |
| | e63042f3e2 | |
| | 55dbf25bc0 | |
| | 9313a68534 | |
| | 37010faf98 | |
| | 775a817b33 | |
| | 6d92c4ecd3 | |
| | 4e7486095e | |
| | 72a9e90286 | |
| | 75253dc0fa | |
| | 82a043e7ee | |
| | bb198a1ed4 | |
| | 56b65eadb5 | |
| | ebcf1a7f7f | |
| | 3d9bf9f551 | |
| | da28fd0f2a | |
| | dfb930bf8c | |
| | ed6625cf8a | |
| | cb53dd514a | |
| | 0403aa2cfc | |
| | f8781a9be4 | |
| | c1e4cb7b96 | |
| | d50499eea6 | |
| | 8641ba864c | |
| | 387557b10d | |
| | bd9b78ee46 | |
| | 6462c967a5 | |
| | 219dba3e95 | |
| | 92b6f7ef44 | |
| | a885e5ae8c | |
| | 93087a07f9 | |
| | ca0e9bf40f | |
| | 3e0af68ae9 | |
| | a9ee4f8e7b | |
| | 60d671f706 | |
| | 5a8af4b89d | |
| | a0500b4562 | |
| | 38541af171 | |
| | 0cd047c2ce | |
| | caaa41718d | |
| | 8d624daf56 | |
| | 1dfa8f75a0 | |
| | f473d7da1a | |
| | 73642827c3 | |
| | a67349f845 | |
| | 76195e23b9 | |
| | a9cf9b23ec | |
| | e86e00ae45 | |
| | 2c42dec469 | |
| | 5e7806178d | |
| | fe462777e6 | |
| | beb23756d4 | |
| | 92094b533b | |
| | b3c1f10813 | |
| | 47a4517a4d | |
| | 1119acc065 | |
| | 0fc2c7f0bd | |
| | c59127cd2a | |
| | 6995c3f64d | |
| | 2b407e0786 | |
| | 92aef1d11e | |
| | 8f58a419c9 | |
| | 39bc9dae1f | |
| | f6bf3e64f8 | |
| | 8ec58aa0c7 | |
| | 86071df7b0 | |
| | dbef35334d | |
| | 52871652c0 | |
| | 4afee951c4 | |
| | f57de0ed0f | |
| | 813c6ec54c | |
| | 5f2808d0b0 | |
| | eea5f0a94d | |
| | e89412ec14 | |
| | 59b3a21899 | |
| | 814ff0e2e6 | |
| | 8e1992d29a | |
| | fed831524d | |
| | e4494409fd | |
| | 3343c0ed06 | |
| | 35279a0552 | |
| | 7a7cb7e842 | |
| | a906f602f8 | |
| | e820ea4048 | |
| | ddc8e3a6a7 | |
| | c9a97ebf19 | |
| | 09359077c8 | |
| | fea145bfd8 | |
| | d041e13f9e | |
| | 961ba10695 | |
| | 887d5053e2 | |
| | ddfadbea0d | |
| | 8a152dd74c | |
| | a67e45e1e3 | |
| | dc5b56b9ef | |
| | 6220570ca2 | |
| | c775509bfd | |
| | e9ba7a8c71 | |
| | 0c98593176 | |
| | d33bd4bbfc | |
| | 022f2a7eb1 | |
| | 303cd28732 | |
| | 1de161e4e6 | |
| | 10ba1a633f | |
| | c04275422f | |
| | d3e09e1ad2 | |
| | 3a92901eb3 | |
| | f88c72b158 | |
| | 87b63600c2 | |
| | dfa35b90b8 | |
| | 95a2107030 | |
| | 1610f2f4a5 | |
| | 814644530c | |
| | 94107fc1a9 | |
| | c0f5a7de29 | |
| | 19900d258e | |
| | 00dcea1738 | |
| | 72009f1b60 | |
| | 866088c70f | |
| | 27aeefe056 | |
| | 60c2ff747a | |
| | e63bfc8f29 | |
| | 5cda6e4086 | |
| | d21b058721 | |
| | 2bef7e31ec | |
| | c077d513eb | |
| | 402c0a68f5 | |
| | 513e5845d1 | |
| | 3f1ca24b26 | |
| | 3235c490cf | |
| | b4c5341e43 | |
| | aeb6c7d2f8 | |
| | 9eca0de19d | |
| | 9f3b55b87a | |
| | d38e9301fe | |
| | fd3e7122b9 | |
| | 3fdef4c070 | |
| | 5860a396b7 | |
| | 273c134683 | |
| | 62abf707e5 | |
| | cd7d337601 | |
| | 732b3f8d1c | |
| | 8189d89e78 | |
| | abba7c44b7 | |
| | bb2409d99d | |
| | d233658268 | |
| | 64b704ef5e | |
| | 9462e42f25 | |
| | 9502ac3696 | |
| | 50946bf210 | |
| | 6605e37dd3 | |
| | 8125b80ed4 | |
| | 7e8776da63 | |
| | 9fe78c2aaa | |
| | a1f9969773 | |
| | 0fca069a12 | |
| | 2d6fe1f805 | |
| | 9cb537a9b3 | |
| | 609bcc6672 | |
| | 9e7fec1afc | |
| | 985596c44c | |
| | 5e11479053 | |
| | 37ae1766d9 | |
| | 8e004eea2d | |
| | d9245691b4 | |
| | b660d48fe1 | |
| | df78978543 | |
| | 42099b0b3f | |
| | e8502b64b0 | |
| | 4bfa8af6bd | |
| | 574c0ea4cc | |
| | 5d2f4a4c5d | |
| | ef0ce42126 | |
| | 786d405caa | |
| | 48eeb81cec | |
| | d79d08ab4a | |
| | 080bae4d07 | |
| | 504761b7dd | |
| | f6936cadab | |
| | 7bf19db2c8 | |
| | 0bac03ed48 | |
| | 78cb2cb508 | |
| | 1966e042ff | |
| | 7a12c17e5d | |
| | a14f07e8b7 | |
| | cc34841c49 | |
| | f6f5412c76 | |
| | f7e8b9abb3 | |
| | bf8ebdb098 | |
| | 67347165ca | |
| | a2ab79deae | |
| | 82a76e87e0 | |
| | 7d0f2a2746 | |
| | e0c761c110 | |
| | c091ba740f | |
| | 504b0b1cb2 | |
| | aa856f83d9 | |
| | 1ada92c329 | |
| | 8be9e519c7 | |
| | 0a851c3047 | |
| | fb5bce8291 | |
| | 37e1af586d | |
| | cab9e191da | |
| | c243eaf9ca | |
| | cc53a632ed | |
| | 7f9a63195c | |
| | c32772f588 | |
| | aafef77114 | |
| | dec14165fb | |
| | fcd0d5e529 | |
| | 9b4db72a0d | |
| | 4a562cddcb | |
| | c92bedb6a9 | |
| | c51cc0298a | |
| | e9b2117b38 | |
| | bcb5d167fd | |
| | 72dfbc32f5 | |
| | 0e84ac7de2 | |
| | 7e759baf40 | |
| | 2854533f42 | |
| | fbd0c7ce3e | |
| | 398b4c4fa1 | |
| | c821449fb7 | |
| | 7842e25d32 | |
| | de9a7199df | |
| | abd4bf08ab | |
| | 796ce2121b | |
| | 51253e2bf4 | |
| | 8c3c6c3841 | |
| | 44f15cc22c | |
| | 4878174b77 | |
| | db4c80a455 | |
| | bf95a3239c | |
| | ab6dafaab6 | |
| | 8af063a165 | |
| | 0a70c3a61e | |
| | 1d54a7373c | |
| | 550bf60460 | |
| | 5c95a5da31 | |
| | edd77c1e25 | |
| | 2cbc5bd3ca | |
| | 89cb632acd | |
| | c2c2dd424b | |
| | 57b098c397 | |
| | e36c82d71c | |
| | 333483a16e | |
| | 7d14cd6b33 | |
| | 83091994a6 | |
| | c141acb6bf | |
| | 389a8d8dec | |
| | f324ef461f | |
| | a1a4389c35 | |
| | b43439482d | |
| | 7d3ba8a0eb | |
| | 4f0675d5e9 | |
| | 4734077f47 | |
| | 5a4d2b44d2 | |
| | 6d3e77533e | |
| | f7fe4b9441 | |
| | e885daf0e3 | |
| | 4b6437e6a5 | |
| | 034780dce9 | |
| | d067ed0a2f | |
| | 74d55ca639 | |
| | 3cabf333ce | |
| | 14e1ebee9e | |
| | 4756402f75 | |
| | 28f5ff6039 | |
| | a954ad1c67 | |
| | c7cc4d0b68 | |
| | d9ebb2e79b | |
| | fbaea5ff6a | |
| | 7356328f53 | |
| | a4728f566f | |
| | e5f47ba350 | |
| | 145fa49e54 | |
| | a87b501a47 | |
| | a19792fbd7 | |
| | 6d2b81e07f | |
| | db686fb964 | |
| | dcb4578903 | |
| | 1f6d5aec82 | |
| | 29bbb6555c | |
| | b377ea94c5 | |
| | 283b82d46a | |
| | 321ab71192 | |
| | b7a97e7102 | |
| | 313af4e83d | |
| | ed7cadd4c0 | |
| | bcf4c73bae | |
| | 3b90bbaf6f | |
| | ba1995704a | |
| | 4ea3b6181a | |
| | 41107041f3 | |
| | 32346c646b | |
| | 500dc27c2b | |
| | df5c31e8df | |
| | b53dfebad5 | |
| | 161c8c6383 | |
| | fccae40564 | |
| | 5c97ab0a34 | |
| | 682299a7df | |
| | 75cf36050d | |
| | c4cc6d2ff3 | |
| | b048798a09 | |
| | 9230a3899f | |
| | 0b57b36c8f | |
| | 7fe3cec4eb | |
| | dd5e7a8291 | |
| | c736d02b52 | |
| | 4e6508b5e3 | |
| | 4537c8af5b | |
| | 81edb74c5e | |
| | 129e7fb0b8 | |
| | c7d02127b1 | |
| | b65644c3e3 | |
| | 2ddbfebecb | |
| | b622121c0a | |
| | 38db4c46ff | |
| | 66485b04c6 | |
| | 4e75c57bbb | |
| | 6b039ce75b | |
| | 4afe5f380a | |
| | fbb598ce82 | |
| | 60e1478fb9 | |
| | 25f04804cd | |
| | 578fef2355 | |
| | 2c957a6e5c | |
| | aaed1bdd89 | |
| | ccb04acb56 | |
| | fbdfd0d596 | |
| | c8dc09c265 | |
| | 0370a6464b | |
| | 3d2ea547d8 | |
| | b8c54c3f38 | |
| | 0969789973 | |
| | bc243ab1e8 | |
| | b8532070f7 | |
| | 10d9f7872d | |
| | a4ea9aec73 | |
| | f26f0b6626 | |
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-* @superhx @SCNieh @ShadowySpirits @Chillax-0v0
+* @superhx @Gezi-lzq @1sonofqiu @woshigaopp

@@ -49,7 +49,7 @@ jobs:
       - name: Setup Gradle
         uses: gradle/gradle-build-action@v2.9.0
       - name: Checkstyle
-        run: ./gradlew --build-cache rat checkstyleMain checkstyleTest
+        run: ./gradlew --build-cache rat checkstyleMain checkstyleTest spotlessJavaCheck
   spotbugs:
     name: "Spotbugs"
     runs-on: ${{ matrix.os }}

@@ -0,0 +1,67 @@
+name: Docker Bitnami Release
+
+on:
+  workflow_dispatch:
+  push:
+    tags:
+      - '[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+.[0-9]+-rc[0-9]+'
+
+
+jobs:
+  docker-release:
+    name: Docker Image Release
+    strategy:
+      matrix:
+        platform: [ "ubuntu-24.04" ]
+        jdk: ["17"]
+    runs-on: ${{ matrix.platform }}
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Set up JDK ${{ matrix.jdk }}
+        uses: actions/setup-java@v3
+        with:
+          java-version: ${{ matrix.jdk }}
+          distribution: "zulu"
+      - name: Setup Gradle
+        uses: gradle/gradle-build-action@v2.12.0
+      - name: Get project version
+        id: get_project_version
+        run: |
+          project_version=$(./gradlew properties | grep "version:" | awk '{print $2}')
+          echo "PROJECT_VERSION=${project_version}" >> $GITHUB_OUTPUT
+
+      - name: Build TarGz
+        run: |
+          ./gradlew -Pprefix=automq-${{ github.ref_name }}_ --build-cache --refresh-dependencies clean releaseTarGz
+
+      # docker image release
+      - name: Cp TarGz to Docker Path
+        run: |
+          cp ./core/build/distributions/automq-${{ github.ref_name }}_kafka-${{ steps.get_project_version.outputs.PROJECT_VERSION }}.tgz ./container/bitnami
+      - name: Determine Image Tags
+        id: image_tags
+        run: |
+          echo "tags=${{ secrets.DOCKERHUB_USERNAME }}/automq:${{ github.ref_name }}-bitnami" >> $GITHUB_OUTPUT
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_READ_WRITE_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v5
+        with:
+          context: ./container/bitnami
+          push: true
+          tags: ${{ steps.image_tags.outputs.tags }}
+          platforms: linux/amd64,linux/arm64

@@ -0,0 +1,70 @@
+name: AutoMQ Kafka Docker Release
+
+on:
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'AutoMQ Version Tag'
+        required: false
+        type: string
+  workflow_run:
+    workflows: ["GitHub Release"]
+    types:
+      - completed
+
+env:
+  KAFKA_VERSION: "3.9.0"
+
+jobs:
+  automq-kafka-release:
+    name: AutoMQ Kafka Docker Image Release
+    strategy:
+      matrix:
+        platform: [ "ubuntu-24.04" ]
+        jdk: [ "17" ]
+    runs-on: ${{ matrix.platform }}
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+
+      - name: Get release tag
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.tag }}" ]]; then
+            TAG="${{ github.event.inputs.tag }}"
+          # use the latest tag if not specified
+          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            TAG=$(git ls-remote --tags https://github.com/AutoMQ/automq.git | grep -v '\^{}' | tail -1 | sed 's/.*refs\/tags\///')
+          else
+            TAG="${{ github.event.workflow_run.head_branch }}"
+          fi
+
+          AUTOMQ_URL="https://github.com/AutoMQ/automq/releases/download/${TAG}/automq-${TAG}_kafka-${KAFKA_VERSION}.tgz"
+
+          {
+            echo "AUTOMQ_VERSION=${TAG}-kafka"
+            echo "AUTOMQ_URL=${AUTOMQ_URL}"
+          } >> $GITHUB_ENV
+
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_READ_WRITE_TOKEN }}
+
+      - name: Build and Push Docker Image
+        run: |
+          python3 -m venv .venv
+          source .venv/bin/activate
+          .venv/bin/pip install setuptools
+
+          cd docker
+          python3 docker_release.py \
+            ${{ secrets.DOCKERHUB_USERNAME }}/automq:${AUTOMQ_VERSION} \
+            --kafka-url ${AUTOMQ_URL}

@@ -1,6 +1,7 @@
 name: Docker Release
 
 on:
+  workflow_dispatch:
   push:
     tags:
       - '[0-9]+.[0-9]+.[0-9]+'

@@ -12,7 +13,7 @@ jobs:
     name: Docker Image Release
     strategy:
       matrix:
-        platform: [ "ubuntu-22.04" ]
+        platform: [ "ubuntu-24.04" ]
        jdk: ["17"]
     runs-on: ${{ matrix.platform }}
     permissions:

@@ -69,4 +70,4 @@ jobs:
           context: ./docker
           push: true
           tags: ${{ steps.image_tags.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64,linux/arm64

@@ -0,0 +1,84 @@
+name: Docker Strimzi Release
+
+on:
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'AutoMQ Version Tag'
+        required: false
+        type: string
+  workflow_run:
+    workflows: ["GitHub Release"]
+    types:
+      - completed
+
+env:
+  KAFKA_VERSION: "3.9.0"
+  STRIMZI_REPO: "https://github.com/AutoMQ/strimzi-kafka-operator.git"
+  STRIMZI_BRANCH: "main"
+
+jobs:
+  strimzi-release:
+    name: Strimzi Image Release
+    if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }}
+    strategy:
+      matrix:
+        platform: [ "ubuntu-24.04" ]
+        jdk: ["17"]
+    runs-on: ${{ matrix.platform }}
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Get release tag
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.tag }}" ]]; then
+            TAG="${{ github.event.inputs.tag }}"
+          # use the latest tag if not specified
+          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            TAG=$(git ls-remote --tags https://github.com/AutoMQ/automq.git | grep -v '\^{}' | tail -1 | sed 's/.*refs\/tags\///')
+          else
+            TAG="${{ github.event.workflow_run.head_branch }}"
+          fi
+
+          AUTOMQ_URL="https://github.com/AutoMQ/automq/releases/download/${TAG}/automq-${TAG}_kafka-${KAFKA_VERSION}.tgz"
+
+          {
+            echo "AUTOMQ_VERSION=${TAG}"
+            echo "AUTOMQ_URL=${AUTOMQ_URL}"
+          } >> $GITHUB_ENV
+
+      - name: Set up JDK ${{ matrix.jdk }}
+        uses: actions/setup-java@v3
+        with:
+          java-version: ${{ matrix.jdk }}
+          distribution: "zulu"
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_READ_WRITE_TOKEN }}
+
+      - name: Build AutoMQ Strimzi Image
+        run: |
+          git clone --depth 1 --branch "${{ env.STRIMZI_BRANCH }}" "${{ env.STRIMZI_REPO }}" strimzi
+          cd strimzi
+
+          chmod +x ./tools/automq/build-automq-image.sh
+          ./tools/automq/build-automq-image.sh \
+            "${{ env.AUTOMQ_VERSION }}" \
+            "${{ env.AUTOMQ_URL }}" \
+            "${{ env.KAFKA_VERSION }}" \
+            "${{ secrets.DOCKERHUB_USERNAME }}" \
+            "automq"

@@ -46,7 +46,7 @@ jobs:
         run: |
           python docker_build_test.py kafka/test -tag=test -type=${{ github.event.inputs.image_type }} -u=${{ github.event.inputs.kafka_url }}
       - name: Run CVE scan
-        uses: aquasecurity/trivy-action@master
+        uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
         with:
           image-ref: 'kafka/test:test'
           format: 'table'

@@ -45,7 +45,7 @@ jobs:
         run: |
           python docker_official_image_build_test.py kafka/test -tag=test -type=${{ github.event.inputs.image_type }} -v=${{ github.event.inputs.kafka_version }}
       - name: Run CVE scan
-        uses: aquasecurity/trivy-action@master
+        uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
         with:
           image-ref: 'kafka/test:test'
           format: 'table'

@@ -31,11 +31,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
       - name: Login to Docker Hub
-        uses: docker/login-action@v3
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
         with:
           username: ${{ secrets.DOCKERHUB_USER }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

@@ -47,11 +47,11 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r docker/requirements.txt
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
       - name: Login to Docker Hub
-        uses: docker/login-action@v3
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
         with:
           username: ${{ secrets.DOCKERHUB_USER }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

@@ -29,7 +29,7 @@ jobs:
         supported_image_tag: ['latest', '3.7.0']
     steps:
       - name: Run CVE scan
-        uses: aquasecurity/trivy-action@master
+        uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
        if: always()
        with:
          image-ref: apache/kafka:${{ matrix.supported_image_tag }}

@@ -30,32 +30,39 @@ jobs:
         uses: gradle/gradle-build-action@v2.12.0
 
       - name: Build TarGz
+        id: build-targz
         run: |
           ./gradlew -Pprefix=automq-${{ github.ref_name }}_ --build-cache --refresh-dependencies clean releaseTarGz
           mkdir -p core/build/distributions/latest
+          LATEST_TAG=$(git tag --sort=-v:refname | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)
+          echo "LATEST_TAG=$LATEST_TAG"
+          IS_LATEST="false"
+          if [ "$LATEST_TAG" == "${{ github.ref_name }}" ]; then
+            IS_LATEST=true
+          fi
+          echo "IS_LATEST=$IS_LATEST" >> $GITHUB_OUTPUT
           for file in core/build/distributions/automq-*.tgz; do
             if [[ ! "$file" =~ site-docs ]]; then
-              echo "Find latest tgz file: $file"
-              cp "$file" core/build/distributions/latest/automq-kafka-latest.tgz
-              break
+              if [ "$IS_LATEST" = "true" ]; then
+                echo "Find latest tgz file: $file"
+                cp "$file" core/build/distributions/latest/automq-kafka-latest.tgz
+              fi
             else
               echo "Skip and remove site-docs file: $file"
               rm "$file"
             fi
           done
 
-      - uses: jakejarvis/s3-sync-action@master
-        name: s3-upload-latest
-        if: ${{ github.repository_owner == 'AutoMQ' }}
+      - uses: tvrcgo/oss-action@master
+        name: upload-latest
+        if: ${{ github.repository_owner == 'AutoMQ' && steps.build-targz.outputs.IS_LATEST == 'true' }}
         with:
-          args: --follow-symlinks --delete
-        env:
-          AWS_S3_BUCKET: ${{ secrets.AWS_CN_PROD_BUCKET }}
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_CN_PROD_AK }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_CN_PROD_SK }}
-          AWS_REGION: 'cn-northwest-1'
-          SOURCE_DIR: 'core/build/distributions/latest'
-          DEST_DIR: 'community_edition/artifacts'
+          bucket: ${{ secrets.UPLOAD_BUCKET }}
+          key-id: ${{ secrets.UPLOAD_BUCKET_AK }}
+          key-secret: ${{ secrets.UPLOAD_BUCKET_SK }}
+          region: 'oss-cn-hangzhou'
+          assets: |
+            core/build/distributions/latest/automq-kafka-latest.tgz:community_edition/artifacts/automq-kafka-latest.tgz
 
       - name: GitHub Release
         uses: softprops/action-gh-release@v1

@@ -41,6 +41,22 @@ jobs:
     name: "Run Main E2E Tests 5"
     uses: ./.github/workflows/e2e-run.yml
     if: ${{ github.repository_owner == 'AutoMQ' }}
     with:
       suite-id: "main5"
       test-yaml: "tests/suites/main_kos_test_suite5.yml"
+      runner: "e2e"
+  main_e2e_6:
+    name: "Run Main E2E Tests 6"
+    uses: ./.github/workflows/e2e-run.yml
+    if: ${{ github.repository_owner == 'AutoMQ' }}
+    with:
+      suite-id: "main6"
+      test-yaml: "tests/suites/main_kos_test_suite6.yml"
+      runner: "e2e"
+  main_e2e_7:
+    name: "Run Main E2E Tests 7"
+    uses: ./.github/workflows/e2e-run.yml
+    if: ${{ github.repository_owner == 'AutoMQ' }}
+    with:
+      suite-id: "main5"
+      test-path: "tests/kafkatest/automq"

@@ -57,5 +73,5 @@ jobs:
       CURRENT_REPO: ${{ github.repository }}
       RUN_ID: ${{ github.run_id }}
       WEB_HOOK_URL: ${{ secrets.E2E_REPORT_WEB_HOOK_URL }}
-      DATA_MAP: "{\"main_e2e_1\": ${{ toJSON(needs.main_e2e_1.outputs) }}, \"main_e2e_2\": ${{ toJSON(needs.main_e2e_2.outputs) }}, \"main_e2e_3\": ${{ toJSON(needs.main_e2e_3.outputs) }}, \"main_e2e_4\": ${{ toJSON(needs.main_e2e_4.outputs) }}, \"main_e2e_5\": ${{ toJSON(needs.main_e2e_5.outputs) }}}"
+      DATA_MAP: "{\"main_e2e_1\": ${{ toJSON(needs.main_e2e_1.outputs) }}, \"main_e2e_2\": ${{ toJSON(needs.main_e2e_2.outputs) }}, \"main_e2e_3\": ${{ toJSON(needs.main_e2e_3.outputs) }}, \"main_e2e_4\": ${{ toJSON(needs.main_e2e_4.outputs) }}, \"main_e2e_5\": ${{ toJSON(needs.main_e2e_5.outputs) }}, \"main_e2e_6\": ${{ toJSON(needs.main_e2e_6.outputs) }}, \"main_e2e_7\": ${{ toJSON(needs.main_e2e_7.outputs) }}}"
       REPORT_TITLE_PREFIX: "Main"

@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Publish Maven Package
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version to publish'
+        required: true
+  push:
+    tags:
+      - '[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+.[0-9]+-rc[0-9]+'
+
+env:
+  VERSION: ${{ github.event.inputs.version || github.ref_name }}
+
+jobs:
+  publish:
+    name: "Publish to Github Packages"
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ ubuntu-22.04 ]
+        jdk: [ 17 ]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Gradle wrapper validation
+        uses: gradle/actions/wrapper-validation@v3
+      - name: Set up JDK ${{ matrix.jdk }}
+        uses: actions/setup-java@v3
+        with:
+          java-version: ${{ matrix.jdk }}
+          distribution: "zulu"
+      - name: Setup Gradle
+        uses: gradle/actions/setup-gradle@v4
+        with:
+          gradle-version: '8.10'
+      - name: Publish
+        run: |
+          gradle publish -PmavenUrl='https://maven.pkg.github.com/AutoMQ/automq' \
+            -PmavenUsername=${{ env.GITHUB_ACTOR }} -PmavenPassword=${{ secrets.GITHUB_TOKEN }} \
+            -PskipSigning=true \
+            -Pgroup=com.automq.automq -Pversion=${{ env.VERSION }}

@@ -0,0 +1,31 @@
+name: Spark Iceberg image
+
+on:
+  workflow_dispatch:
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_READ_WRITE_TOKEN }}
+
+      - name: Build and Push
+        uses: docker/build-push-action@v6
+        with:
+          context: docker/table_topic/spark_iceberg/
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: automqinc/spark-iceberg:latest

@@ -62,3 +62,7 @@ storage/kafka-tiered-storage/
 docker/test/report_*.html
 kafka.Kafka
 __pycache__
+
+# Ignore bin folder generated by the build, but exclude the one in the root
+bin/
+!/bin/

@@ -1,6 +0,0 @@
-<component name="CopyrightManager">
-  <copyright>
-    <option name="notice" value="Copyright 2024, AutoMQ HK Limited. The use of this file is governed by the Business Source License, as detailed in the file "/LICENSE.S3Stream" included in this repository. As of the Change Date specified in that file, in accordance with the Business Source License, use of this software will be governed by the Apache License, Version 2.0" />
-    <option name="myName" value="BSL" />
-  </copyright>
-</component>

@@ -1,7 +0,0 @@
-<component name="CopyrightManager">
-  <settings default="BSL">
-    <module2copyright>
-      <element module="All" copyright="BSL" />
-    </module2copyright>
-  </settings>
-</component>

LICENSE (221 changes)

@@ -1,29 +1,202 @@
-Copyright (c) 2023-2024 AutoMQ HK Limited.
-
-this software are licensed as follows:
-
-1. Apache Kafka Source and Dependency Licensing:
-All code in this repository that is forked from Apache Kafka and its
-dependencies will continue to be licensed under the original Apache Kafka
-open source license. For detailed licensing information regarding Apache
-Kafka and its dependencies, please refer to the files under the "/licenses/"
-folder in this repository.
-
-2. S3Stream Component Licensing:
-The S3Stream component added to this project (specifically referring to all
-files under the "/S3Stream/" directory) is licensed under a revised Business
-Source License (BSL) by AutoMQ HK Limited, with the specific terms available
-in the /LICENSE.S3Stream file in this repository. Any dependencies used by
-the S3Stream component are subject to their respective open source licenses.
-
-3. File-Level License Precedence:
-For each file in this repository, if the license is explicitly specified in
-the header of the file, the license stated in the file header shall prevail.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

@@ -1,96 +0,0 @@
-License text copyright © 2023 MariaDB plc, All Rights Reserved.
-"Business Source License" is a trademark of MariaDB plc.
-
-
-Parameters
-
-Licensor:             AutoMQ HK Limited.
-Licensed Work:        AutoMQ Version 1.1.2 or later. The Licensed Work is (c) 2024
-                      AutoMQ HK Limited.
-Additional Use Grant: You may make production use of the Licensed Work, provided
-                      Your use does not include offering the Licensed Work to third
-                      parties on a hosted or embedded basis in order to compete with
-                      AutoMQ's paid version(s) of the Licensed Work. For purposes
-                      of this license:
-
-                      A "competitive offering" is a Product that is offered to third
-                      parties on a paid basis, including through paid support
-                      arrangements, that significantly overlaps with the capabilities
-                      of AutoMQ's paid version(s) of the Licensed Work. If Your
-                      Product is not a competitive offering when You first make it
-                      generally available, it will not become a competitive offering
-                      later due to AutoMQ releasing a new version of the Licensed
-                      Work with additional capabilities. In addition, Products that
-                      are not provided on a paid basis are not competitive.
-
-                      "Product" means software that is offered to end users to manage
-                      in their own environments or offered as a service on a hosted
-                      basis.
-
-                      "Embedded" means including the source code or executable code
-                      from the Licensed Work in a competitive offering. "Embedded"
-                      also means packaging the competitive offering in such a way
-                      that the Licensed Work must be accessed or downloaded for the
-                      competitive offering to operate.
-
-                      Hosting or using the Licensed Work(s) for internal purposes
-                      within an organization is not considered a competitive
-                      offering. AutoMQ considers your organization to include all
-                      of your affiliates under common control.
-
-                      For binding interpretive guidance on using AutoMQ products
-                      under the Business Source License, please visit our FAQ.
-                      (https://www.automq.com/license-faq)
-Change Date:          Change date is four years from release date.
-                      Please see https://github.com/AutoMQ/automq/releases for exact dates
-Change License:       Apache License, Version 2.0
-                      URL: https://www.apache.org/licenses/LICENSE-2.0
-
-
-For information about alternative licensing arrangements for the Licensed Work,
-please contact licensing@automq.com.
-
-Notice
-
-Business Source License 1.1
-
-Terms
-
-The Licensor hereby grants you the right to copy, modify, create derivative
-works, redistribute, and make non-production use of the Licensed Work. The
-Licensor may make an Additional Use Grant, above, permitting limited production use.
-
-Effective on the Change Date, or the fourth anniversary of the first publicly
-available distribution of a specific version of the Licensed Work under this
-License, whichever comes first, the Licensor hereby grants you rights under
-the terms of the Change License, and the rights granted in the paragraph
-above terminate.
-
-If your use of the Licensed Work does not comply with the requirements
-currently in effect as described in this License, you must purchase a
-commercial license from the Licensor, its affiliated entities, or authorized
-resellers, or you must refrain from using the Licensed Work.
-
-All copies of the original and modified Licensed Work, and derivative works
-of the Licensed Work, are subject to this License. This License applies
-separately for each version of the Licensed Work and the Change Date may vary
-for each version of the Licensed Work released by Licensor.
-
-You must conspicuously display this License on each original or modified copy
-of the Licensed Work. If you receive the Licensed Work in original or
-modified form from a third party, the terms and conditions set forth in this
-License apply to your use of that work.
-
-Any use of the Licensed Work in violation of this License will automatically
-terminate your rights under this License for the current and all other
-versions of the Licensed Work.
-
-This License does not grant you any right in any trademark or logo of
-Licensor or its affiliates (provided that you may use a trademark or logo of
-Licensor as expressly required by this License).
-
-TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
-AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
-EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
-TITLE.

NOTICE (2 changes)

@@ -1,5 +1,5 @@
 AutoMQ NOTICE
-Copyright 2023-2024, AutoMQ HK Limited.
+Copyright 2023-2025, AutoMQ HK Limited.
 
 ---------------------------
 Apache Kafka NOTICE

@@ -1,5 +1,5 @@
 AutoMQ Binary NOTICE
-Copyright 2023-2024, AutoMQ HK Limited.
+Copyright 2023-2025, AutoMQ HK Limited.
 
 ---------------------------
 Apache Kafka Binary NOTICE

@@ -0,0 +1,125 @@
+# AutoMQ Log Uploader Module
+
+This module provides asynchronous S3 log upload capability based on Log4j 1.x. Other submodules only need to depend on this module and configure it simply to synchronize logs to object storage. Core components:
+
+- `com.automq.log.S3RollingFileAppender`: Extends `RollingFileAppender`, pushes log events to the uploader while writing to local files.
+- `com.automq.log.uploader.LogUploader`: Asynchronously buffers, compresses, and uploads logs; supports configuration switches and periodic cleanup.
+- `com.automq.log.uploader.S3LogConfig`: Interface that abstracts the configuration required for uploading. Implementations must provide cluster ID, node ID, object storage instance, and leadership status.
+
+## Quick Integration
+
+1. Add dependency in your module's `build.gradle`:
+```groovy
+implementation project(':automq-log-uploader')
+```
+2. Implement or provide an `S3LogConfig` instance and configure the appender:
+
+```java
+// Set up the S3LogConfig through your application
+S3LogConfig config = // your S3LogConfig implementation
+S3RollingFileAppender.setup(config);
+```
+3. Reference the Appender in `log4j.properties`:
+
+```properties
+log4j.appender.s3_uploader=com.automq.log.S3RollingFileAppender
+log4j.appender.s3_uploader.File=logs/server.log
+log4j.appender.s3_uploader.MaxFileSize=100MB
+log4j.appender.s3_uploader.MaxBackupIndex=10
+log4j.appender.s3_uploader.layout=org.apache.log4j.PatternLayout
+log4j.appender.s3_uploader.layout.ConversionPattern=[%d] %p %m (%c)%n
+```
+
+## S3LogConfig Interface
+
+The `S3LogConfig` interface provides the configuration needed for log uploading:
+
+```java
+public interface S3LogConfig {
+    boolean isEnabled();           // Whether S3 upload is enabled
+    String clusterId();            // Cluster identifier
+    int nodeId();                  // Node identifier
+    ObjectStorage objectStorage(); // S3 object storage instance
+    boolean isLeader();            // Whether this node should upload logs
+}
+```
+
+The upload schedule can be overridden by environment variables:
+
+- `AUTOMQ_OBSERVABILITY_UPLOAD_INTERVAL`: Maximum upload interval (milliseconds).
+- `AUTOMQ_OBSERVABILITY_CLEANUP_INTERVAL`: Retention period (milliseconds); old objects earlier than this time will be cleaned up.
+
+## Implementation Notes
+
+### Leader Selection
+
+The log uploader relies on the `S3LogConfig.isLeader()` method to determine whether the current node should upload logs and perform cleanup tasks. This avoids multiple nodes in a cluster simultaneously executing these operations.
+
+### Object Storage Path
+
+Logs are uploaded to object storage following this path pattern:
+```
+automq/logs/{clusterId}/{nodeId}/{hour}/{uuid}
+```
+
+Where:
+- `clusterId` and `nodeId` come from the S3LogConfig
+- `hour` is the timestamp hour for log organization
+- `uuid` is a unique identifier for each log batch
+
+## Usage Example
+
+Complete example of using the log uploader:
+
+```java
+import com.automq.log.S3RollingFileAppender;
+import com.automq.log.uploader.S3LogConfig;
+import com.automq.stream.s3.operator.ObjectStorage;
+
+// Implement S3LogConfig
+public class MyS3LogConfig implements S3LogConfig {
+    @Override
+    public boolean isEnabled() {
+        return true; // Enable S3 upload
+    }
+
+    @Override
+    public String clusterId() {
+        return "my-cluster";
+    }
+
+    @Override
+    public int nodeId() {
+        return 1;
+    }
+
+    @Override
+    public ObjectStorage objectStorage() {
+        // Return your ObjectStorage instance
+        return myObjectStorage;
+    }
+
+    @Override
+    public boolean isLeader() {
+        // Return true if this node should upload logs
+        return isCurrentNodeLeader();
+    }
+}
+
+// Setup and use
+S3LogConfig config = new MyS3LogConfig();
+S3RollingFileAppender.setup(config);
+
+// Configure Log4j to use the appender
+// The appender will now automatically upload logs to S3
+```
+
+## Lifecycle Management
+
+Remember to properly shutdown the log uploader when your application terminates:
+
+```java
+// During application shutdown
+S3RollingFileAppender.shutdown();
+```

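The README's lifecycle section pairs `S3RollingFileAppender.setup(config)` with an explicit `shutdown()` call. A minimal sketch of wiring that into a JVM shutdown hook — only the two static methods shown in this diff are assumed, and `MyS3LogConfig` is the hypothetical implementation from the README example:

```java
import com.automq.log.S3RollingFileAppender;
import com.automq.log.uploader.S3LogConfig;

public final class LogUploadBootstrap {
    public static void main(String[] args) {
        // Hypothetical config implementation, as sketched in the README above.
        S3LogConfig config = new MyS3LogConfig();
        S3RollingFileAppender.setup(config);
        // Flush and close the uploader when the JVM exits, so buffered
        // log batches are not lost on shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread(S3RollingFileAppender::shutdown));
    }
}
```
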
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2025, AutoMQ HK Limited.
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.automq.log;
+
+import com.automq.log.uploader.LogRecorder;
+import com.automq.log.uploader.LogUploader;
+import com.automq.log.uploader.S3LogConfig;
+
+import org.apache.log4j.RollingFileAppender;
+import org.apache.log4j.spi.LoggingEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class S3RollingFileAppender extends RollingFileAppender {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(S3RollingFileAppender.class);
+    private static final Object INIT_LOCK = new Object();
+
+    private static volatile LogUploader logUploaderInstance;
+    private static volatile S3LogConfig s3LogConfig;
+
+    public S3RollingFileAppender() {
+        super();
+    }
+
+    public static void setup(S3LogConfig config) {
+        s3LogConfig = config;
+        synchronized (INIT_LOCK) {
+            if (logUploaderInstance != null) {
+                return;
+            }
+            try {
+                if (s3LogConfig == null) {
+                    LOGGER.error("No s3LogConfig available; S3 log upload remains disabled.");
+                    throw new RuntimeException("S3 log configuration is missing.");
+                }
+                if (!s3LogConfig.isEnabled() || s3LogConfig.objectStorage() == null) {
+                    LOGGER.warn("S3 log upload is disabled by configuration.");
+                    return;
+                }
+
+                LogUploader uploader = new LogUploader();
+                uploader.start(s3LogConfig);
+                logUploaderInstance = uploader;
+                LOGGER.info("S3RollingFileAppender initialized successfully using s3LogConfig {}.", s3LogConfig.getClass().getName());
+            } catch (Exception e) {
+                LOGGER.error("Failed to initialize S3RollingFileAppender", e);
+                throw e;
+            }
+        }
+    }
+
+    public static void shutdown() {
+        if (logUploaderInstance != null) {
+            synchronized (INIT_LOCK) {
+                if (logUploaderInstance != null) {
+                    try {
+                        logUploaderInstance.close();
+                        logUploaderInstance = null;
+                        LOGGER.info("S3RollingFileAppender log uploader closed successfully.");
+                    } catch (Exception e) {
+                        LOGGER.error("Failed to close S3RollingFileAppender log uploader", e);
+                    }
+                }
+            }
+        }
+    }
+
+    @Override
+    protected void subAppend(LoggingEvent event) {
+        super.subAppend(event);
+        if (!closed && logUploaderInstance != null) {
+            LogRecorder.LogEvent logEvent = new LogRecorder.LogEvent(
+                event.getTimeStamp(),
+                event.getLevel().toString(),
+                event.getLoggerName(),
+                event.getRenderedMessage(),
+                event.getThrowableStrRep());
+
+            try {
+                logEvent.validate();
+                logUploaderInstance.append(logEvent);
+            } catch (IllegalArgumentException e) {
+                errorHandler.error("Failed to validate and append log event", e, 0);
+            }
+        }
+    }
+}

@@ -1,15 +1,23 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
-package com.automq.shell.log;
+package com.automq.log.uploader;
 
 import org.apache.commons.lang3.StringUtils;
 

@ -39,10 +47,10 @@ public interface LogRecorder {
|
|||
throw new IllegalArgumentException("Level cannot be blank");
|
||||
}
|
||||
if (StringUtils.isBlank(logger)) {
|
||||
throw new IllegalArgumentException("Level cannot be blank");
|
||||
throw new IllegalArgumentException("Logger cannot be blank");
|
||||
}
|
||||
if (StringUtils.isBlank(message)) {
|
||||
throw new IllegalArgumentException("Level cannot be blank");
|
||||
throw new IllegalArgumentException("Message cannot be blank");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
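The hunk above fixes a copy-paste bug: the `logger` and `message` checks previously reused the "Level cannot be blank" message. A minimal sketch of the corrected behavior from a caller's point of view; the constructor arguments follow the order used in `subAppend` above, and the surrounding scaffolding is illustrative rather than part of the patch:

```java
import com.automq.log.uploader.LogRecorder;

public class LogEventValidationExample {
    public static void main(String[] args) {
        // A well-formed event passes validation silently.
        LogRecorder.LogEvent ok = new LogRecorder.LogEvent(
            System.currentTimeMillis(), "INFO", "kafka.server.KafkaApis", "started", new String[0]);
        ok.validate();

        // A blank logger now fails with "Logger cannot be blank"
        // (before this patch it misleadingly reported "Level cannot be blank").
        LogRecorder.LogEvent badLogger = new LogRecorder.LogEvent(
            System.currentTimeMillis(), "INFO", " ", "started", new String[0]);
        try {
            badLogger.validate();
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // prints: Logger cannot be blank
        }
    }
}
```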
@@ -1,17 +1,25 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
-package com.automq.shell.log;
+package com.automq.log.uploader;
 
-import com.automq.shell.AutoMQApplication;
+import com.automq.log.uploader.util.Utils;
 import com.automq.stream.s3.operator.ObjectStorage;
 import com.automq.stream.s3.operator.ObjectStorage.ObjectInfo;
 import com.automq.stream.s3.operator.ObjectStorage.ObjectPath;

@@ -46,12 +54,14 @@ public class LogUploader implements LogRecorder {
 
     public static final int DEFAULT_MAX_QUEUE_SIZE = 64 * 1024;
     public static final int DEFAULT_BUFFER_SIZE = 16 * 1024 * 1024;
-    public static final int UPLOAD_INTERVAL = System.getenv("AUTOMQ_OBSERVABILITY_UPLOAD_INTERVAL") != null ? Integer.parseInt(System.getenv("AUTOMQ_OBSERVABILITY_UPLOAD_INTERVAL")) : 60 * 1000;
-    public static final int CLEANUP_INTERVAL = System.getenv("AUTOMQ_OBSERVABILITY_CLEANUP_INTERVAL") != null ? Integer.parseInt(System.getenv("AUTOMQ_OBSERVABILITY_CLEANUP_INTERVAL")) : 2 * 60 * 1000;
+    public static final int UPLOAD_INTERVAL = System.getenv("AUTOMQ_OBSERVABILITY_UPLOAD_INTERVAL") != null
+        ? Integer.parseInt(System.getenv("AUTOMQ_OBSERVABILITY_UPLOAD_INTERVAL"))
+        : 60 * 1000;
+    public static final int CLEANUP_INTERVAL = System.getenv("AUTOMQ_OBSERVABILITY_CLEANUP_INTERVAL") != null
+        ? Integer.parseInt(System.getenv("AUTOMQ_OBSERVABILITY_CLEANUP_INTERVAL"))
+        : 2 * 60 * 1000;
     public static final int MAX_JITTER_INTERVAL = 60 * 1000;
 
-    private static final LogUploader INSTANCE = new LogUploader();
-
     private final BlockingQueue<LogEvent> queue = new LinkedBlockingQueue<>(DEFAULT_MAX_QUEUE_SIZE);
     private final ByteBuf uploadBuffer = Unpooled.directBuffer(DEFAULT_BUFFER_SIZE);
     private final Random random = new Random();

@@ -62,16 +72,42 @@ public class LogUploader implements LogRecorder {
 
     private volatile S3LogConfig config;
 
-    private volatile CompletableFuture<Void> startFuture;
     private ObjectStorage objectStorage;
     private Thread uploadThread;
     private Thread cleanupThread;
 
-    private LogUploader() {
+    public LogUploader() {
     }
 
-    public static LogUploader getInstance() {
-        return INSTANCE;
+    public synchronized void start(S3LogConfig config) {
+        if (this.config != null) {
+            LOGGER.warn("LogUploader is already started.");
+            return;
+        }
+        this.config = config;
+        if (!config.isEnabled() || config.objectStorage() == null) {
+            LOGGER.warn("LogUploader is disabled due to configuration.");
+            closed = true;
+            return;
+        }
+
+        try {
+            this.objectStorage = config.objectStorage();
+            this.uploadThread = new Thread(new UploadTask());
+            this.uploadThread.setName("log-uploader-upload-thread");
+            this.uploadThread.setDaemon(true);
+            this.uploadThread.start();
+
+            this.cleanupThread = new Thread(new CleanupTask());
+            this.cleanupThread.setName("log-uploader-cleanup-thread");
+            this.cleanupThread.setDaemon(true);
+            this.cleanupThread.start();
+
+            LOGGER.info("LogUploader started successfully.");
+        } catch (Exception e) {
+            LOGGER.error("Failed to start LogUploader", e);
+            closed = true;
+        }
     }
 
     public void close() throws InterruptedException {

@@ -88,63 +124,15 @@ public class LogUploader implements LogRecorder {
 
     @Override
     public boolean append(LogEvent event) {
-        if (!closed && couldUpload()) {
+        if (!closed) {
             return queue.offer(event);
         }
         return false;
     }
 
-    private boolean couldUpload() {
-        initConfiguration();
-        boolean enabled = config != null && config.isEnabled() && config.objectStorage() != null;
-
-        if (enabled) {
-            initUploadComponent();
-        }
-
-        return enabled && startFuture != null && startFuture.isDone();
-    }
-
-    private void initConfiguration() {
-        if (config == null) {
-            synchronized (this) {
-                if (config == null) {
-                    config = AutoMQApplication.getBean(S3LogConfig.class);
-                }
-            }
-        }
-    }
-
-    private void initUploadComponent() {
-        if (startFuture == null) {
-            synchronized (this) {
-                if (startFuture == null) {
-                    startFuture = CompletableFuture.runAsync(() -> {
-                        try {
-                            objectStorage = config.objectStorage();
-                            uploadThread = new Thread(new UploadTask());
-                            uploadThread.setName("log-uploader-upload-thread");
-                            uploadThread.setDaemon(true);
-                            uploadThread.start();
-
-                            cleanupThread = new Thread(new CleanupTask());
-                            cleanupThread.setName("log-uploader-cleanup-thread");
-                            cleanupThread.setDaemon(true);
-                            cleanupThread.start();
-
-                            startFuture.complete(null);
-                        } catch (Exception e) {
-                            LOGGER.error("Initialize log uploader failed", e);
-                        }
-                    }, command -> new Thread(command).start());
-                }
-            }
-        }
-    }
-
     private class UploadTask implements Runnable {
 
-        public String formatTimestampInMillis(long timestamp) {
+        private String formatTimestampInMillis(long timestamp) {
             return ZonedDateTime.ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.systemDefault())
                 .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS Z"));
         }

@@ -156,7 +144,6 @@ public class LogUploader implements LogRecorder {
                     long now = System.currentTimeMillis();
                     LogEvent event = queue.poll(1, TimeUnit.SECONDS);
                     if (event != null) {
-                        // DateTime Level [Logger] Message \n stackTrace
                         StringBuilder logLine = new StringBuilder()
                             .append(formatTimestampInMillis(event.timestampMillis()))
                             .append(" ")

@@ -195,25 +182,22 @@ public class LogUploader implements LogRecorder {
 
         private void upload(long now) {
             if (uploadBuffer.readableBytes() > 0) {
-                if (couldUpload()) {
-                    try {
-                        while (!Thread.currentThread().isInterrupted()) {
-                            if (objectStorage == null) {
-                                break;
-                            }
-
-                            try {
-                                String objectKey = getObjectKey();
-                                objectStorage.write(WriteOptions.DEFAULT, objectKey, uploadBuffer.retainedSlice().asReadOnly()).get();
-                                break;
-                            } catch (Exception e) {
-                                e.printStackTrace(System.err);
-                                Thread.sleep(1000);
-                            }
-                        }
-                    } catch (InterruptedException e) {
-                        //ignore
-                    }
-                }
+                try {
+                    while (!Thread.currentThread().isInterrupted()) {
+                        if (objectStorage == null) {
+                            break;
+                        }
+                        try {
+                            String objectKey = getObjectKey();
+                            objectStorage.write(WriteOptions.DEFAULT, objectKey, Utils.compress(uploadBuffer.slice().asReadOnly())).get();
+                            break;
+                        } catch (Exception e) {
+                            LOGGER.warn("Failed to upload logs, will retry", e);
+                            Thread.sleep(1000);
+                        }
+                    }
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();
+                }
                 uploadBuffer.clear();
                 lastUploadTimestamp = now;

@@ -228,12 +212,11 @@ public class LogUploader implements LogRecorder {
             public void run() {
                 while (!Thread.currentThread().isInterrupted()) {
                     try {
-                        if (closed || !config.isActiveController()) {
+                        if (closed || !config.isLeader()) {
                             Thread.sleep(Duration.ofMinutes(1).toMillis());
                             continue;
                         }
                         long expiredTime = System.currentTimeMillis() - CLEANUP_INTERVAL;
-
                         List<ObjectInfo> objects = objectStorage.list(String.format("automq/logs/%s", config.clusterId())).join();
 
                         if (!objects.isEmpty()) {

@@ -243,7 +226,6 @@ public class LogUploader implements LogRecorder {
                                 .collect(Collectors.toList());
 
                             if (!keyList.isEmpty()) {
-                                // Some of s3 implements allow only 1000 keys per request.
                                 CompletableFuture<?>[] deleteFutures = Lists.partition(keyList, 1000)
                                     .stream()
                                     .map(objectStorage::delete)

@@ -251,7 +233,6 @@ public class LogUploader implements LogRecorder {
                                 CompletableFuture.allOf(deleteFutures).join();
                             }
                         }
-
                         Thread.sleep(Duration.ofMinutes(1).toMillis());
                     } catch (InterruptedException e) {
                         break;

@@ -266,5 +247,4 @@ public class LogUploader implements LogRecorder {
             String hour = LocalDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ofPattern("yyyyMMddHH"));
             return String.format("automq/logs/%s/%s/%s/%s", config.clusterId(), config.nodeId(), hour, UUID.randomUUID());
         }
-
 }
@@ -0,0 +1,34 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.log.uploader;

import com.automq.stream.s3.operator.ObjectStorage;

public interface S3LogConfig {
    boolean isEnabled();

    String clusterId();

    int nodeId();

    ObjectStorage objectStorage();

    boolean isLeader();
}
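Since `LogUploader` no longer locates its configuration through `AutoMQApplication.getBean(...)`, the caller now supplies an `S3LogConfig` and drives the lifecycle explicitly. A minimal sketch under assumptions: the `ObjectStorage` instance named `storage` is created elsewhere, and the `clusterId`/`nodeId` values are purely illustrative:

```java
import com.automq.log.uploader.LogUploader;
import com.automq.log.uploader.S3LogConfig;
import com.automq.stream.s3.operator.ObjectStorage;

public class LogUploadBootstrap {
    public static LogUploader startUploader(ObjectStorage storage) {
        S3LogConfig config = new S3LogConfig() {
            @Override public boolean isEnabled() { return true; }
            @Override public String clusterId() { return "my-cluster"; }      // illustrative value
            @Override public int nodeId() { return 1; }                       // illustrative value
            @Override public ObjectStorage objectStorage() { return storage; }
            @Override public boolean isLeader() { return true; }              // only the leader runs cleanup
        };

        LogUploader uploader = new LogUploader();   // public constructor replaces the old singleton
        uploader.start(config);                     // spawns the upload and cleanup daemon threads
        return uploader;                            // call uploader.close() during shutdown
    }
}
```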
@@ -0,0 +1,69 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.log.uploader.util;

import com.automq.stream.s3.ByteBufAlloc;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import io.netty.buffer.ByteBuf;

public class Utils {

    private Utils() {
    }

    public static ByteBuf compress(ByteBuf input) throws IOException {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) {
            byte[] buffer = new byte[input.readableBytes()];
            input.readBytes(buffer);
            gzipOutputStream.write(buffer);
        }

        ByteBuf compressed = ByteBufAlloc.byteBuffer(byteArrayOutputStream.size());
        compressed.writeBytes(byteArrayOutputStream.toByteArray());
        return compressed;
    }

    public static ByteBuf decompress(ByteBuf input) throws IOException {
        byte[] compressedData = new byte[input.readableBytes()];
        input.readBytes(compressedData);
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);

        try (GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);
             ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            byte[] buffer = new byte[1024];
            int bytesRead;
            while ((bytesRead = gzipInputStream.read(buffer)) != -1) {
                byteArrayOutputStream.write(buffer, 0, bytesRead);
            }

            byte[] uncompressedData = byteArrayOutputStream.toByteArray();
            ByteBuf output = ByteBufAlloc.byteBuffer(uncompressedData.length);
            output.writeBytes(uncompressedData);
            return output;
        }
    }
}
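A quick round-trip sketch of the helper above; the payload string is illustrative. Note that `compress` and `decompress` consume the readable bytes of their input, and the returned buffers come from `ByteBufAlloc`, so the caller is responsible for releasing them:

```java
import com.automq.log.uploader.util.Utils;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class GzipRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteBuf original = Unpooled.copiedBuffer(
            "2025-01-01 00:00:00.000 +0000 INFO [kafka] hello", StandardCharsets.UTF_8);

        ByteBuf compressed = Utils.compress(original.slice()); // slice() keeps the original readable
        ByteBuf restored = Utils.decompress(compressed);

        System.out.println(restored.toString(StandardCharsets.UTF_8));

        original.release();
        compressed.release();
        restored.release();
    }
}
```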
@@ -0,0 +1,459 @@
# AutoMQ automq-metrics Module

The AutoMQ OpenTelemetry module is a telemetry data collection and export component built on the OpenTelemetry SDK and designed specifically for AutoMQ Kafka. It provides unified telemetry data management, supports collecting JVM metrics, JMX metrics, and Yammer metrics, and can export data to Prometheus, OTLP-compatible backends, or S3-compatible storage.

## Core Features

### 1. Metrics Collection
- **JVM Metrics**: Automatically collect JVM runtime metrics, including CPU, memory pools, garbage collection, and threads
- **JMX Metrics**: Define and collect JMX Bean metrics through configuration files
- **Yammer Metrics**: Bridge the existing Kafka Yammer metrics system to OpenTelemetry

### 2. Multiple Exporter Support
- **Prometheus**: Expose metrics in Prometheus format through an HTTP server
- **OTLP**: Support both gRPC and HTTP/Protobuf protocols for exporting to OTLP backends
- **S3**: Export metrics to S3-compatible object storage systems

### 3. Flexible Configuration
- Supports parameter settings through Properties configuration files
- Configurable export intervals, compression methods, timeout values, etc.
- Supports metric cardinality limits to control memory usage

## Module Structure

```
com.automq.opentelemetry/
├── AutoMQTelemetryManager.java        # Main management class for initialization and lifecycle
├── TelemetryConstants.java            # Constants definition
├── common/
│   ├── OTLPCompressionType.java       # OTLP compression types
│   └── OTLPProtocol.java              # OTLP protocol types
├── exporter/
│   ├── MetricsExporter.java           # Exporter interface
│   ├── MetricsExportConfig.java       # Export configuration
│   ├── MetricsExporterProvider.java   # Exporter factory provider
│   ├── MetricsExporterType.java       # Exporter type enumeration
│   ├── MetricsExporterURI.java        # URI parser for exporters
│   ├── OTLPMetricsExporter.java       # OTLP exporter implementation
│   ├── PrometheusMetricsExporter.java # Prometheus exporter implementation
│   └── s3/                            # S3 metrics exporter implementation
│       ├── CompressionUtils.java      # Utility for data compression
│       ├── PrometheusUtils.java       # Utilities for Prometheus format
│       ├── S3MetricsExporter.java     # S3 metrics exporter implementation
│       └── S3MetricsExporterAdapter.java # Adapter to handle S3 metrics export
└── yammer/
    ├── DeltaHistogram.java            # Delta histogram implementation
    ├── OTelMetricUtils.java           # OpenTelemetry metrics utilities
    ├── YammerMetricsProcessor.java    # Yammer metrics processor
    └── YammerMetricsReporter.java     # Yammer metrics reporter
```

## Quick Start

### 1. Basic Usage

```java
import com.automq.opentelemetry.AutoMQTelemetryManager;
import com.automq.opentelemetry.exporter.MetricsExportConfig;

// Implement MetricsExportConfig
public class MyMetricsExportConfig implements MetricsExportConfig {
    @Override
    public String clusterId() { return "my-cluster"; }

    @Override
    public boolean isLeader() { return true; }

    @Override
    public int nodeId() { return 1; }

    @Override
    public ObjectStorage objectStorage() {
        // Return your object storage instance for S3 exports
        return myObjectStorage;
    }

    @Override
    public List<Pair<String, String>> baseLabels() {
        return Arrays.asList(
            Pair.of("environment", "production"),
            Pair.of("region", "us-east-1")
        );
    }

    @Override
    public int intervalMs() { return 60000; } // 60 seconds
}

// Create export configuration
MetricsExportConfig config = new MyMetricsExportConfig();

// Initialize telemetry manager singleton
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "prometheus://localhost:9090", // exporter URI
    "automq-kafka",                // service name
    "broker-1",                    // instance ID
    config                         // export config
);

// Start Yammer metrics reporting (optional)
MetricsRegistry yammerRegistry = // Get Kafka's Yammer registry
manager.startYammerMetricsReporter(yammerRegistry);

// Application running...

// Shutdown telemetry system
AutoMQTelemetryManager.shutdownInstance();
```

### 2. Get Meter Instance

```java
// Get the singleton instance
AutoMQTelemetryManager manager = AutoMQTelemetryManager.getInstance();

// Get Meter for custom metrics
Meter meter = manager.getMeter();

// Create custom metrics
LongCounter requestCounter = meter
    .counterBuilder("http_requests_total")
    .setDescription("Total number of HTTP requests")
    .build();

requestCounter.add(1, Attributes.of(AttributeKey.stringKey("method"), "GET"));
```

## Configuration

### Basic Configuration

Configuration is provided through the `MetricsExportConfig` interface and constructor parameters:

| Parameter | Description | Example |
|-----------|-------------|---------|
| `exporterUri` | Metrics exporter URI | `prometheus://localhost:9090` |
| `serviceName` | Service name for telemetry | `automq-kafka` |
| `instanceId` | Unique service instance ID | `broker-1` |
| `config` | MetricsExportConfig implementation | See example above |

### Exporter Configuration

All configuration is done through the `MetricsExportConfig` interface and constructor parameters. Export intervals, compression settings, and other options are controlled through:

1. **Exporter URI**: Determines the export destination and protocol
2. **MetricsExportConfig**: Provides cluster information, intervals, and base labels
3. **Constructor parameters**: Service name and instance ID

#### Prometheus Exporter
```java
// Use prometheus:// URI scheme
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "prometheus://localhost:9090",
    "automq-kafka",
    "broker-1",
    config
);
```

#### OTLP Exporter
```java
// Use otlp:// URI scheme with optional query parameters
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "otlp://localhost:4317?protocol=grpc&compression=gzip&timeout=30000",
    "automq-kafka",
    "broker-1",
    config
);
```

#### S3 Metrics Exporter
```java
// Use s3:// URI scheme
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "s3://access-key:secret-key@my-bucket.s3.amazonaws.com",
    "automq-kafka",
    "broker-1",
    config // config.clusterId(), nodeId(), isLeader() used for S3 export
);
```

Example usage with S3 exporter:

```java
// Implementation for S3 export configuration
public class S3MetricsExportConfig implements MetricsExportConfig {
    private final ObjectStorage objectStorage;

    public S3MetricsExportConfig(ObjectStorage objectStorage) {
        this.objectStorage = objectStorage;
    }

    @Override
    public String clusterId() { return "my-kafka-cluster"; }

    @Override
    public boolean isLeader() {
        // Only one node in the cluster should return true
        return isCurrentNodeLeader();
    }

    @Override
    public int nodeId() { return 1; }

    @Override
    public ObjectStorage objectStorage() { return objectStorage; }

    @Override
    public List<Pair<String, String>> baseLabels() {
        return Arrays.asList(Pair.of("environment", "production"));
    }

    @Override
    public int intervalMs() { return 60000; }
}

// Initialize telemetry manager with S3 export
ObjectStorage objectStorage = // Create your object storage instance
MetricsExportConfig config = new S3MetricsExportConfig(objectStorage);

AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "s3://access-key:secret-key@my-bucket.s3.amazonaws.com",
    "automq-kafka",
    "broker-1",
    config
);

// Application running...

// Shutdown telemetry system
AutoMQTelemetryManager.shutdownInstance();
```

### JMX Metrics Configuration

Define JMX metrics collection rules through YAML configuration files:

```java
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    exporterUri, serviceName, instanceId, config
);

// Set JMX config paths after initialization
manager.setJmxConfigPaths("/jmx-config.yaml,/kafka-jmx.yaml");
```

#### Configuration File Requirements

1. **Directory Requirements**:
   - Configuration files must be placed in the project's classpath (e.g., the `src/main/resources` directory)
   - Subdirectory structures are supported, e.g., `/config/jmx-metrics.yaml`

2. **Path Format**:
   - Paths must start with `/` to indicate starting from the classpath root
   - Multiple configuration files are separated by commas

3. **File Format**:
   - Use YAML format (`.yaml` or `.yml` extension)
   - Filenames can be customized; meaningful names are recommended

#### Recommended Directory Structure

```
src/main/resources/
├── jmx-kafka-broker.yaml     # Kafka Broker metrics configuration
├── jmx-kafka-consumer.yaml   # Kafka Consumer metrics configuration
├── jmx-kafka-producer.yaml   # Kafka Producer metrics configuration
└── config/
    ├── custom-jmx.yaml       # Custom JMX metrics configuration
    └── third-party-jmx.yaml  # Third-party component JMX configuration
```

JMX configuration file example (`jmx-config.yaml`):
```yaml
rules:
  - bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec
    metricAttribute:
      name: kafka_server_broker_topic_messages_in_per_sec
      description: Messages in per second
      unit: "1/s"
    attributes:
      - name: topic
        value: topic
```

## Supported Metric Types

### 1. JVM Metrics
- Memory usage (heap memory, non-heap memory, memory pools)
- CPU usage
- Garbage collection statistics
- Thread states

### 2. Kafka Metrics
Through Yammer metrics bridging, the following types of Kafka metrics are supported:
- `BytesInPerSec` - Bytes input per second
- `BytesOutPerSec` - Bytes output per second
- `Size` - Log size (for identifying idle partitions)

### 3. Custom Metrics
Custom metrics can be created through the OpenTelemetry API, as shown in the sketch after this list:
- Counter
- Gauge
- Histogram
- UpDownCounter
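For instance, a histogram and an observable gauge can be registered through the same `Meter` obtained from the manager. The instrument names and recorded values below are illustrative, and the snippet assumes `initializeInstance(...)` was called earlier:

```java
import com.automq.opentelemetry.AutoMQTelemetryManager;

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleHistogram;
import io.opentelemetry.api.metrics.Meter;

public class CustomMetricsExample {
    public static void main(String[] args) {
        Meter meter = AutoMQTelemetryManager.getInstance().getMeter();

        // Histogram: record request latencies in milliseconds.
        DoubleHistogram latency = meter.histogramBuilder("request_latency_ms")
            .setDescription("Request latency distribution")
            .setUnit("ms")
            .build();
        latency.record(12.5, Attributes.of(AttributeKey.stringKey("api"), "produce"));

        // Observable gauge: the callback is sampled on every export cycle.
        meter.gaugeBuilder("queue_depth")
            .setDescription("Current depth of the internal work queue")
            .buildWithCallback(m -> m.record(42)); // 42 is a placeholder sample
    }
}
```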

## Best Practices

### 1. Production Environment Configuration

```java
public class ProductionMetricsConfig implements MetricsExportConfig {
    @Override
    public String clusterId() { return "production-cluster"; }

    @Override
    public boolean isLeader() {
        // Implement your leader election logic
        return isCurrentNodeController();
    }

    @Override
    public int nodeId() { return getCurrentNodeId(); }

    @Override
    public ObjectStorage objectStorage() {
        return productionObjectStorage;
    }

    @Override
    public List<Pair<String, String>> baseLabels() {
        return Arrays.asList(
            Pair.of("environment", "production"),
            Pair.of("region", System.getenv("AWS_REGION")),
            Pair.of("version", getApplicationVersion())
        );
    }

    @Override
    public int intervalMs() { return 60000; } // 1 minute
}

// Initialize for production
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "prometheus://0.0.0.0:9090", // Or S3 URI for object storage export
    "automq-kafka",
    System.getenv("HOSTNAME"),
    new ProductionMetricsConfig()
);
```

### 2. Development Environment Configuration

```java
public class DevelopmentMetricsConfig implements MetricsExportConfig {
    @Override
    public String clusterId() { return "dev-cluster"; }

    @Override
    public boolean isLeader() { return true; } // Single node in dev

    @Override
    public int nodeId() { return 1; }

    @Override
    public ObjectStorage objectStorage() { return null; } // Not needed for OTLP

    @Override
    public List<Pair<String, String>> baseLabels() {
        return Arrays.asList(Pair.of("environment", "development"));
    }

    @Override
    public int intervalMs() { return 10000; } // 10 seconds for faster feedback
}

// Initialize for development
AutoMQTelemetryManager manager = AutoMQTelemetryManager.initializeInstance(
    "otlp://localhost:4317",
    "automq-kafka-dev",
    "local-dev",
    new DevelopmentMetricsConfig()
);
```

### 3. Resource Management
- Set appropriate metric cardinality limits to avoid memory leaks
- Call the `shutdown()` method when the application closes to release resources (see the sketch after this list)
- Monitor exporter health status
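One simple way to guarantee the release step runs when the process exits is to tie `shutdownInstance()` to a JVM shutdown hook; the thread name below is illustrative:

```java
import com.automq.opentelemetry.AutoMQTelemetryManager;

public class TelemetryLifecycle {
    public static void installShutdownHook() {
        // Flush and close all metric readers when the JVM exits.
        Runtime.getRuntime().addShutdownHook(
            new Thread(AutoMQTelemetryManager::shutdownInstance, "telemetry-shutdown"));
    }
}
```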

## Troubleshooting

### Common Issues

1. **Metrics not exported**
   - Check that the exporter URI passed to `initializeInstance()` is correct
   - Verify that the target endpoint is reachable
   - Check error messages in the logs
   - Ensure `MetricsExportConfig.intervalMs()` returns a reasonable value

2. **JMX metrics missing**
   - Confirm that the JMX configuration file paths set via `setJmxConfigPaths()` are correct
   - Check the YAML configuration file format
   - Verify that the JMX Bean exists
   - Ensure the files are on the classpath

3. **High memory usage**
   - Implement cardinality limits in your `MetricsExportConfig`
   - Check for high-cardinality labels in `baseLabels()`
   - Consider increasing the export interval via `intervalMs()`

### Logging Configuration

Enable debug logging for more information using your logging framework configuration (e.g., logback.xml, log4j2.xml):

```xml
<!-- For Logback -->
<logger name="com.automq.opentelemetry" level="DEBUG" />
<logger name="io.opentelemetry" level="INFO" />
```

## Dependencies

- Java 8+
- OpenTelemetry SDK 1.30+
- Apache Commons Lang3
- SLF4J logging framework

## License

This module is open source under the Apache License 2.0.
@@ -0,0 +1,330 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry;

import com.automq.opentelemetry.exporter.MetricsExportConfig;
import com.automq.opentelemetry.exporter.MetricsExporter;
import com.automq.opentelemetry.exporter.MetricsExporterURI;
import com.automq.opentelemetry.yammer.YammerMetricsReporter;
import com.yammer.metrics.core.MetricsRegistry;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator;
import io.opentelemetry.context.propagation.ContextPropagators;
import io.opentelemetry.context.propagation.TextMapPropagator;
import io.opentelemetry.instrumentation.jmx.engine.JmxMetricInsight;
import io.opentelemetry.instrumentation.jmx.engine.MetricConfiguration;
import io.opentelemetry.instrumentation.jmx.yaml.RuleParser;
import io.opentelemetry.instrumentation.runtimemetrics.java8.Cpu;
import io.opentelemetry.instrumentation.runtimemetrics.java8.GarbageCollector;
import io.opentelemetry.instrumentation.runtimemetrics.java8.MemoryPools;
import io.opentelemetry.instrumentation.runtimemetrics.java8.Threads;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.SdkMeterProviderBuilder;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil;
import io.opentelemetry.sdk.resources.Resource;

/**
 * The main manager for AutoMQ telemetry.
 * This class is responsible for initializing, configuring, and managing the lifecycle of all
 * telemetry components, including the OpenTelemetry SDK, metric exporters, and various metric sources.
 */
public class AutoMQTelemetryManager {
    private static final Logger LOGGER = LoggerFactory.getLogger(AutoMQTelemetryManager.class);

    // Singleton instance support
    private static volatile AutoMQTelemetryManager instance;
    private static final Object LOCK = new Object();

    private final String exporterUri;
    private final String serviceName;
    private final String instanceId;
    private final MetricsExportConfig metricsExportConfig;
    private final List<MetricReader> metricReaders = new ArrayList<>();
    private final List<AutoCloseable> autoCloseableList;
    private OpenTelemetrySdk openTelemetrySdk;
    private YammerMetricsReporter yammerReporter;

    private int metricCardinalityLimit = TelemetryConstants.DEFAULT_METRIC_CARDINALITY_LIMIT;
    private String jmxConfigPath;

    /**
     * Constructs a new Telemetry Manager with the given configuration.
     *
     * @param exporterUri         The metrics exporter URI.
     * @param serviceName         The service name to be used in telemetry data.
     * @param instanceId          The unique instance ID for this service instance.
     * @param metricsExportConfig The metrics configuration.
     */
    public AutoMQTelemetryManager(String exporterUri, String serviceName, String instanceId, MetricsExportConfig metricsExportConfig) {
        this.exporterUri = exporterUri;
        this.serviceName = serviceName;
        this.instanceId = instanceId;
        this.metricsExportConfig = metricsExportConfig;
        this.autoCloseableList = new ArrayList<>();
        // Redirect JUL from OpenTelemetry SDK to SLF4J for unified logging
        SLF4JBridgeHandler.removeHandlersForRootLogger();
        SLF4JBridgeHandler.install();
    }

    /**
     * Gets the singleton instance of AutoMQTelemetryManager.
     * Returns null if no instance has been initialized.
     *
     * @return the singleton instance, or null if not initialized
     */
    public static AutoMQTelemetryManager getInstance() {
        return instance;
    }

    /**
     * Initializes the singleton instance with the given configuration.
     * This method should be called before any other components try to access the instance.
     *
     * @param exporterUri         The metrics exporter URI.
     * @param serviceName         The service name to be used in telemetry data.
     * @param instanceId          The unique instance ID for this service instance.
     * @param metricsExportConfig The metrics configuration.
     * @return the initialized singleton instance
     */
    public static AutoMQTelemetryManager initializeInstance(String exporterUri, String serviceName, String instanceId, MetricsExportConfig metricsExportConfig) {
        if (instance == null) {
            synchronized (LOCK) {
                if (instance == null) {
                    AutoMQTelemetryManager newInstance = new AutoMQTelemetryManager(exporterUri, serviceName, instanceId, metricsExportConfig);
                    newInstance.init();
                    instance = newInstance;
                    LOGGER.info("AutoMQTelemetryManager singleton instance initialized");
                }
            }
        }
        return instance;
    }

    /**
     * Shuts down the singleton instance and releases all resources.
     */
    public static void shutdownInstance() {
        if (instance != null) {
            synchronized (LOCK) {
                if (instance != null) {
                    instance.shutdown();
                    instance = null;
                    LOGGER.info("AutoMQTelemetryManager singleton instance shutdown");
                }
            }
        }
    }

    /**
     * Initializes the telemetry system. This method sets up the OpenTelemetry SDK,
     * configures exporters, and registers JVM and JMX metrics.
     */
    public void init() {
        SdkMeterProvider meterProvider = buildMeterProvider();

        this.openTelemetrySdk = OpenTelemetrySdk.builder()
            .setMeterProvider(meterProvider)
            .setPropagators(ContextPropagators.create(TextMapPropagator.composite(
                W3CTraceContextPropagator.getInstance(), W3CBaggagePropagator.getInstance())))
            .buildAndRegisterGlobal();

        // Register JVM and JMX metrics
        registerJvmMetrics(openTelemetrySdk);
        registerJmxMetrics(openTelemetrySdk);

        LOGGER.info("AutoMQ Telemetry Manager initialized successfully.");
    }

    private SdkMeterProvider buildMeterProvider() {
        String hostName;
        try {
            hostName = InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            hostName = "unknown-host";
        }
        AttributesBuilder attrsBuilder = Attributes.builder()
            .put(TelemetryConstants.SERVICE_NAME_KEY, serviceName)
            .put(TelemetryConstants.SERVICE_INSTANCE_ID_KEY, instanceId)
            .put(TelemetryConstants.HOST_NAME_KEY, hostName)
            // Add attributes for Prometheus compatibility
            .put(TelemetryConstants.PROMETHEUS_JOB_KEY, serviceName)
            .put(TelemetryConstants.PROMETHEUS_INSTANCE_KEY, instanceId);

        for (Pair<String, String> label : metricsExportConfig.baseLabels()) {
            attrsBuilder.put(label.getKey(), label.getValue());
        }

        Resource resource = Resource.getDefault().merge(Resource.create(attrsBuilder.build()));
        SdkMeterProviderBuilder meterProviderBuilder = SdkMeterProvider.builder().setResource(resource);

        // Configure exporters from URI
        MetricsExporterURI exporterURI = buildMetricsExporterURI(exporterUri, metricsExportConfig);
        for (MetricsExporter exporter : exporterURI.getMetricsExporters()) {
            MetricReader reader = exporter.asMetricReader();
            metricReaders.add(reader);
            SdkMeterProviderUtil.registerMetricReaderWithCardinalitySelector(meterProviderBuilder, reader,
                instrumentType -> metricCardinalityLimit);
        }

        return meterProviderBuilder.build();
    }

    protected MetricsExporterURI buildMetricsExporterURI(String exporterUri, MetricsExportConfig metricsExportConfig) {
        return MetricsExporterURI.parse(exporterUri, metricsExportConfig);
    }

    private void registerJvmMetrics(OpenTelemetry openTelemetry) {
        autoCloseableList.addAll(MemoryPools.registerObservers(openTelemetry));
        autoCloseableList.addAll(Cpu.registerObservers(openTelemetry));
        autoCloseableList.addAll(GarbageCollector.registerObservers(openTelemetry));
        autoCloseableList.addAll(Threads.registerObservers(openTelemetry));
        LOGGER.info("JVM metrics registered.");
    }

    @SuppressWarnings({"NP_LOAD_OF_KNOWN_NULL_VALUE", "RCN_REDUNDANT_NULLCHECK_OF_NULL_VALUE"})
    private void registerJmxMetrics(OpenTelemetry openTelemetry) {
        List<String> jmxConfigPaths = getJmxConfigPaths();
        if (jmxConfigPaths.isEmpty()) {
            LOGGER.info("No JMX metric config paths provided, skipping JMX metrics registration.");
            return;
        }

        JmxMetricInsight jmxMetricInsight = JmxMetricInsight.createService(openTelemetry, metricsExportConfig.intervalMs());
        MetricConfiguration metricConfig = new MetricConfiguration();

        for (String path : jmxConfigPaths) {
            try (InputStream ins = this.getClass().getResourceAsStream(path)) {
                if (ins == null) {
                    LOGGER.error("JMX config file not found in classpath: {}", path);
                    continue;
                }
                RuleParser parser = RuleParser.get();
                parser.addMetricDefsTo(metricConfig, ins, path);
            } catch (Exception e) {
                LOGGER.error("Failed to parse JMX config file: {}", path, e);
            }
        }

        jmxMetricInsight.start(metricConfig);
        // JmxMetricInsight doesn't implement Closeable, but we can create a wrapper

        LOGGER.info("JMX metrics registered with config paths: {}", jmxConfigPaths);
    }

    public List<String> getJmxConfigPaths() {
        if (StringUtils.isEmpty(jmxConfigPath)) {
            return Collections.emptyList();
        }
        return Stream.of(jmxConfigPath.split(","))
            .map(String::trim)
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    }

    /**
     * Starts reporting metrics from a given Yammer MetricsRegistry.
     *
     * @param registry The Yammer registry to bridge metrics from.
     */
    public void startYammerMetricsReporter(MetricsRegistry registry) {
        if (this.openTelemetrySdk == null) {
            throw new IllegalStateException("TelemetryManager is not initialized. Call init() first.");
        }
        if (registry == null) {
            LOGGER.warn("Yammer MetricsRegistry is null, skipping reporter start.");
            return;
        }
        this.yammerReporter = new YammerMetricsReporter(registry);
        this.yammerReporter.start(getMeter());
    }

    public void shutdown() {
        autoCloseableList.forEach(autoCloseable -> {
            try {
                autoCloseable.close();
            } catch (Exception e) {
                LOGGER.error("Failed to close auto closeable", e);
            }
        });
        metricReaders.forEach(metricReader -> {
            metricReader.forceFlush();
            try {
                metricReader.close();
            } catch (IOException e) {
                LOGGER.error("Failed to close metric reader", e);
            }
        });
        if (openTelemetrySdk != null) {
            openTelemetrySdk.close();
        }
    }

    /**
     * get YammerMetricsReporter instance.
     *
     * @return The YammerMetricsReporter instance.
     */
    public YammerMetricsReporter getYammerReporter() {
        return this.yammerReporter;
    }

    public void setMetricCardinalityLimit(int limit) {
        this.metricCardinalityLimit = limit;
    }

    public void setJmxConfigPaths(String jmxConfigPaths) {
        this.jmxConfigPath = jmxConfigPaths;
    }

    /**
     * Gets the default meter from the initialized OpenTelemetry SDK.
     *
     * @return The meter instance.
     */
    public Meter getMeter() {
        if (this.openTelemetrySdk == null) {
            throw new IllegalStateException("TelemetryManager is not initialized. Call init() first.");
        }
        return this.openTelemetrySdk.getMeter(TelemetryConstants.TELEMETRY_SCOPE_NAME);
    }
}
@@ -0,0 +1,54 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry;

import io.opentelemetry.api.common.AttributeKey;

/**
 * Constants for telemetry, including configuration keys, attribute keys, and default values.
 */
public class TelemetryConstants {

    //################################################################
    // Service and Resource Attributes
    //################################################################
    public static final String SERVICE_NAME_KEY = "service.name";
    public static final String SERVICE_INSTANCE_ID_KEY = "service.instance.id";
    public static final String HOST_NAME_KEY = "host.name";
    public static final String TELEMETRY_SCOPE_NAME = "automq_for_kafka";

    /**
     * The cardinality limit for any single metric.
     */
    public static final String METRIC_CARDINALITY_LIMIT_KEY = "automq.telemetry.metric.cardinality.limit";
    public static final int DEFAULT_METRIC_CARDINALITY_LIMIT = 20000;

    //################################################################
    // Prometheus specific Attributes, for compatibility
    //################################################################
    public static final String PROMETHEUS_JOB_KEY = "job";
    public static final String PROMETHEUS_INSTANCE_KEY = "instance";

    //################################################################
    // Custom Kafka-related Attribute Keys
    //################################################################
    public static final AttributeKey<Long> START_OFFSET_KEY = AttributeKey.longKey("startOffset");
    public static final AttributeKey<Long> END_OFFSET_KEY = AttributeKey.longKey("endOffset");
}
@@ -0,0 +1,44 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.common;

public enum OTLPCompressionType {
    GZIP("gzip"),
    NONE("none");

    private final String type;

    OTLPCompressionType(String type) {
        this.type = type;
    }

    public String getType() {
        return type;
    }

    public static OTLPCompressionType fromString(String type) {
        for (OTLPCompressionType compressionType : OTLPCompressionType.values()) {
            if (compressionType.getType().equalsIgnoreCase(type)) {
                return compressionType;
            }
        }
        throw new IllegalArgumentException("Invalid OTLP compression type: " + type);
    }
}
@@ -0,0 +1,44 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.common;

public enum OTLPProtocol {
    GRPC("grpc"),
    HTTP("http");

    private final String protocol;

    OTLPProtocol(String protocol) {
        this.protocol = protocol;
    }

    public String getProtocol() {
        return protocol;
    }

    public static OTLPProtocol fromString(String protocol) {
        for (OTLPProtocol otlpProtocol : OTLPProtocol.values()) {
            if (otlpProtocol.getProtocol().equalsIgnoreCase(protocol)) {
                return otlpProtocol;
            }
        }
        throw new IllegalArgumentException("Invalid OTLP protocol: " + protocol);
    }
}
@@ -0,0 +1,68 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter;

import com.automq.stream.s3.operator.ObjectStorage;

import org.apache.commons.lang3.tuple.Pair;

import java.util.List;

/**
 * Configuration interface for metrics exporter.
 */
public interface MetricsExportConfig {

    /**
     * Get the cluster ID.
     * @return The cluster ID.
     */
    String clusterId();

    /**
     * Check if the current node is a primary node for metrics upload.
     * @return True if the current node should upload metrics, false otherwise.
     */
    boolean isLeader();

    /**
     * Get the node ID.
     * @return The node ID.
     */
    int nodeId();

    /**
     * Get the object storage instance.
     * @return The object storage instance.
     */
    ObjectStorage objectStorage();

    /**
     * Get the base labels to include in all metrics.
     * @return The base labels.
     */
    List<Pair<String, String>> baseLabels();

    /**
     * Get the interval in milliseconds for metrics export.
     * @return The interval in milliseconds.
     */
    int intervalMs();
}
@@ -1,4 +1,6 @@
 /*
+ * Copyright 2025, AutoMQ HK Limited.
+ *
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.

@@ -14,16 +16,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kafka.raft.errors;
+
+package com.automq.opentelemetry.exporter;
+
+import io.opentelemetry.sdk.metrics.export.MetricReader;
 
 /**
- * Indicates that an append operation cannot be completed because it would have resulted in an
- * unexpected base offset.
+ * An interface for metrics exporters, which can be converted to an OpenTelemetry MetricReader.
  */
-public class UnexpectedBaseOffsetException extends RaftException {
-    private static final long serialVersionUID = 1L;
-
-    public UnexpectedBaseOffsetException(String s) {
-        super(s);
-    }
+public interface MetricsExporter {
+    MetricReader asMetricReader();
 }
@@ -0,0 +1,47 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter;

import java.net.URI;
import java.util.List;
import java.util.Map;

/**
 * Service Provider Interface that allows extending the available metrics exporters
 * without modifying the core AutoMQ OpenTelemetry module.
 */
public interface MetricsExporterProvider {

    /**
     * @param scheme exporter scheme (e.g. "rw")
     * @return true if this provider can create an exporter for the supplied scheme
     */
    boolean supports(String scheme);

    /**
     * Creates a metrics exporter for the provided URI.
     *
     * @param config metrics configuration
     * @param uri original exporter URI
     * @param queryParameters parsed query parameters from the URI
     * @return a MetricsExporter instance, or {@code null} if unable to create one
     */
    MetricsExporter create(MetricsExportConfig config, URI uri, Map<String, List<String>> queryParameters);
}
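Reviewer note: a hedged sketch of a custom provider for the "rw" scheme the javadoc mentions. The class names are hypothetical, and the implementation would also need to be listed in META-INF/services/com.automq.opentelemetry.exporter.MetricsExporterProvider so that ServiceLoader can discover it.

import java.net.URI;
import java.util.List;
import java.util.Map;

// Hypothetical provider: resolves "rw" URIs to a (not included) remote-write exporter.
public class RemoteWriteExporterProvider implements MetricsExporterProvider {
    @Override
    public boolean supports(String scheme) {
        return "rw".equalsIgnoreCase(scheme);
    }

    @Override
    public MetricsExporter create(MetricsExportConfig config, URI uri,
                                  Map<String, List<String>> queryParameters) {
        // RemoteWriteMetricsExporter is an assumed class, not part of this patch.
        return new RemoteWriteMetricsExporter(uri, config.intervalMs());
    }
}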
@@ -0,0 +1,46 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter;

public enum MetricsExporterType {
    OTLP("otlp"),
    PROMETHEUS("prometheus"),
    OPS("ops"),
    OTHER("other");

    private final String type;

    MetricsExporterType(String type) {
        this.type = type;
    }

    public String getType() {
        return type;
    }

    public static MetricsExporterType fromString(String type) {
        for (MetricsExporterType exporterType : MetricsExporterType.values()) {
            if (exporterType.getType().equalsIgnoreCase(type)) {
                return exporterType;
            }
        }
        return OTHER;
    }
}
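Reviewer note: fromString is case-insensitive and never throws; unknown schemes fall through to OTHER and are then offered to the SPI providers.

MetricsExporterType.fromString("PROMETHEUS"); // PROMETHEUS
MetricsExporterType.fromString("rw");         // OTHER -> handled by a MetricsExporterProvider, if any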
@@ -0,0 +1,220 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter;

import com.automq.opentelemetry.common.OTLPCompressionType;
import com.automq.opentelemetry.common.OTLPProtocol;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;

/**
 * Parses the exporter URI and creates the corresponding MetricsExporter instances.
 */
public class MetricsExporterURI {
    private static final Logger LOGGER = LoggerFactory.getLogger(MetricsExporterURI.class);

    private static final List<MetricsExporterProvider> PROVIDERS;

    static {
        List<MetricsExporterProvider> providers = new ArrayList<>();
        ServiceLoader.load(MetricsExporterProvider.class).forEach(providers::add);
        PROVIDERS = Collections.unmodifiableList(providers);
        if (!PROVIDERS.isEmpty()) {
            LOGGER.info("Loaded {} telemetry exporter providers", PROVIDERS.size());
        }
    }

    private final List<MetricsExporter> metricsExporters;

    private MetricsExporterURI(List<MetricsExporter> metricsExporters) {
        this.metricsExporters = metricsExporters != null ? metricsExporters : new ArrayList<>();
    }

    public List<MetricsExporter> getMetricsExporters() {
        return metricsExporters;
    }

    public static MetricsExporterURI parse(String uriStr, MetricsExportConfig config) {
        LOGGER.info("Parsing metrics exporter URI: {}", uriStr);
        if (StringUtils.isBlank(uriStr)) {
            LOGGER.info("Metrics exporter URI is not configured, no metrics will be exported.");
            return new MetricsExporterURI(Collections.emptyList());
        }

        // Support multiple exporters separated by comma
        String[] exporterUris = uriStr.split(",");
        if (exporterUris.length == 0) {
            return new MetricsExporterURI(Collections.emptyList());
        }

        List<MetricsExporter> exporters = new ArrayList<>();
        for (String uri : exporterUris) {
            if (StringUtils.isBlank(uri)) {
                continue;
            }
            MetricsExporter exporter = parseExporter(config, uri.trim());
            if (exporter != null) {
                exporters.add(exporter);
            }
        }
        return new MetricsExporterURI(exporters);
    }

    public static MetricsExporter parseExporter(MetricsExportConfig config, String uriStr) {
        try {
            URI uri = new URI(uriStr);
            String type = uri.getScheme();
            if (StringUtils.isBlank(type)) {
                LOGGER.error("Invalid metrics exporter URI: {}, exporter scheme is missing", uriStr);
                throw new IllegalArgumentException("Invalid metrics exporter URI: " + uriStr);
            }

            Map<String, List<String>> queries = parseQueryParameters(uri);
            return parseExporter(config, type, queries, uri);
        } catch (Exception e) {
            LOGGER.warn("Parse metrics exporter URI {} failed", uriStr, e);
            throw new IllegalArgumentException("Invalid metrics exporter URI: " + uriStr, e);
        }
    }

    public static MetricsExporter parseExporter(MetricsExportConfig config, String type, Map<String, List<String>> queries, URI uri) {
        MetricsExporterType exporterType = MetricsExporterType.fromString(type);
        switch (exporterType) {
            case PROMETHEUS:
                return buildPrometheusExporter(config, queries, uri);
            case OTLP:
                return buildOtlpExporter(config, queries, uri);
            case OPS:
                return buildS3MetricsExporter(config, uri);
            default:
                break;
        }

        MetricsExporterProvider provider = findProvider(type);
        if (provider != null) {
            MetricsExporter exporter = provider.create(config, uri, queries);
            if (exporter != null) {
                return exporter;
            }
        }

        LOGGER.warn("Unsupported metrics exporter type: {}", type);
        return null;
    }

    private static MetricsExporter buildPrometheusExporter(MetricsExportConfig config, Map<String, List<String>> queries, URI uri) {
        // Use query parameters if available, otherwise fall back to URI authority or config defaults
        String host = getStringFromQuery(queries, "host", uri.getHost());
        if (StringUtils.isBlank(host)) {
            host = "localhost";
        }

        int port = uri.getPort();
        if (port <= 0) {
            String portStr = getStringFromQuery(queries, "port", null);
            if (StringUtils.isNotBlank(portStr)) {
                try {
                    port = Integer.parseInt(portStr);
                } catch (NumberFormatException e) {
                    LOGGER.warn("Invalid port in query parameters: {}, using default", portStr);
                    port = 9090;
                }
            } else {
                port = 9090;
            }
        }

        return new PrometheusMetricsExporter(host, port, config.baseLabels());
    }

    private static MetricsExporter buildOtlpExporter(MetricsExportConfig config, Map<String, List<String>> queries, URI uri) {
        // Get endpoint from query parameters or construct from URI
        String endpoint = getStringFromQuery(queries, "endpoint", null);
        if (StringUtils.isBlank(endpoint)) {
            endpoint = uri.getScheme() + "://" + uri.getAuthority();
        }

        // Get protocol from query parameters or config
        String protocol = getStringFromQuery(queries, "protocol", OTLPProtocol.GRPC.getProtocol());

        // Get compression from query parameters or config
        String compression = getStringFromQuery(queries, "compression", OTLPCompressionType.NONE.getType());

        return new OTLPMetricsExporter(config.intervalMs(), endpoint, protocol, compression);
    }

    private static MetricsExporter buildS3MetricsExporter(MetricsExportConfig config, URI uri) {
        LOGGER.info("Creating S3 metrics exporter from URI: {}", uri);
        if (config.objectStorage() == null) {
            LOGGER.warn("No object storage configured, skip s3 metrics exporter creation.");
            return null;
        }
        // Create the S3MetricsExporterAdapter with appropriate configuration
        return new com.automq.opentelemetry.exporter.s3.S3MetricsExporterAdapter(config);
    }

    private static Map<String, List<String>> parseQueryParameters(URI uri) {
        Map<String, List<String>> queries = new HashMap<>();
        String query = uri.getQuery();
        if (StringUtils.isNotBlank(query)) {
            String[] pairs = query.split("&");
            for (String pair : pairs) {
                String[] keyValue = pair.split("=", 2);
                if (keyValue.length == 2) {
                    String key = keyValue[0];
                    String value = keyValue[1];
                    queries.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
                }
            }
        }
        return queries;
    }

    private static String getStringFromQuery(Map<String, List<String>> queries, String key, String defaultValue) {
        List<String> values = queries.get(key);
        if (values != null && !values.isEmpty()) {
            return values.get(0);
        }
        return defaultValue;
    }

    private static MetricsExporterProvider findProvider(String scheme) {
        for (MetricsExporterProvider provider : PROVIDERS) {
            try {
                if (provider.supports(scheme)) {
                    return provider;
                }
            } catch (Exception e) {
                LOGGER.warn("Telemetry exporter provider {} failed to evaluate support for scheme {}", provider.getClass().getName(), scheme, e);
            }
        }
        return null;
    }
}
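Reviewer note: a usage sketch of the parser above; someConfig stands in for a caller-supplied MetricsExportConfig, and the host names are placeholders.

// Two exporters from one comma-separated URI string.
MetricsExporterURI exporterUri = MetricsExporterURI.parse(
    "prometheus://localhost:9090,otlp://collector:4317?protocol=grpc&compression=gzip",
    someConfig);
// The Prometheus host/port come from the URI authority; the OTLP endpoint is rebuilt
// from scheme + authority, with protocol/compression taken from the query string.
List<MetricsExporter> exporters = exporterUri.getMetricsExporters();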
@@ -1,18 +1,28 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

-package kafka.log.stream.s3.telemetry.exporter;
+package com.automq.opentelemetry.exporter;

-import org.apache.kafka.common.utils.Utils;
+import com.automq.opentelemetry.common.OTLPCompressionType;
+import com.automq.opentelemetry.common.OTLPProtocol;
+
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -28,13 +38,16 @@ import io.opentelemetry.sdk.metrics.export.PeriodicMetricReaderBuilder;

 public class OTLPMetricsExporter implements MetricsExporter {
     private static final Logger LOGGER = LoggerFactory.getLogger(OTLPMetricsExporter.class);
-    private final int intervalMs;
+    private final long intervalMs;
     private final String endpoint;
     private final OTLPProtocol protocol;
     private final OTLPCompressionType compression;
+    // Default timeout for OTLP exporters
+    private static final long DEFAULT_EXPORTER_TIMEOUT_MS = 30000;

-    public OTLPMetricsExporter(int intervalMs, String endpoint, String protocol, String compression) {
-        if (Utils.isBlank(endpoint) || "null".equals(endpoint)) {
+    public OTLPMetricsExporter(long intervalMs, String endpoint, String protocol, String compression) {
+        if (StringUtils.isBlank(endpoint) || "null".equals(endpoint)) {
             throw new IllegalArgumentException("OTLP endpoint is required");
         }
         this.intervalMs = intervalMs;
@@ -42,7 +55,7 @@ public class OTLPMetricsExporter implements MetricsExporter {
         this.protocol = OTLPProtocol.fromString(protocol);
         this.compression = OTLPCompressionType.fromString(compression);
         LOGGER.info("OTLPMetricsExporter initialized with endpoint: {}, protocol: {}, compression: {}, intervalMs: {}",
-            endpoint, protocol, compression, intervalMs);
+                endpoint, protocol, compression, intervalMs);
     }

     public String endpoint() {
@@ -57,31 +70,29 @@ public class OTLPMetricsExporter implements MetricsExporter {
         return compression;
     }

-    public int intervalMs() {
+    public long intervalMs() {
         return intervalMs;
     }

     @Override
     public MetricReader asMetricReader() {
-        PeriodicMetricReaderBuilder builder;
-        switch (protocol) {
-            case GRPC:
+        PeriodicMetricReaderBuilder builder = switch (protocol) {
+            case GRPC -> {
                 OtlpGrpcMetricExporterBuilder otlpExporterBuilder = OtlpGrpcMetricExporter.builder()
                     .setEndpoint(endpoint)
                     .setCompression(compression.getType())
-                    .setTimeout(Duration.ofMillis(ExporterConstants.DEFAULT_EXPORTER_TIMEOUT_MS));
-                builder = PeriodicMetricReader.builder(otlpExporterBuilder.build());
-                break;
-            case HTTP:
+                    .setTimeout(Duration.ofMillis(DEFAULT_EXPORTER_TIMEOUT_MS));
+                yield PeriodicMetricReader.builder(otlpExporterBuilder.build());
+            }
+            case HTTP -> {
                 OtlpHttpMetricExporterBuilder otlpHttpExporterBuilder = OtlpHttpMetricExporter.builder()
                     .setEndpoint(endpoint)
                     .setCompression(compression.getType())
-                    .setTimeout(Duration.ofMillis(ExporterConstants.DEFAULT_EXPORTER_TIMEOUT_MS));
-                builder = PeriodicMetricReader.builder(otlpHttpExporterBuilder.build());
-                break;
-            default:
-                throw new IllegalArgumentException("Unsupported OTLP protocol: " + protocol);
-        }
+                    .setTimeout(Duration.ofMillis(DEFAULT_EXPORTER_TIMEOUT_MS));
+                yield PeriodicMetricReader.builder(otlpHttpExporterBuilder.build());
+            }
+            default -> throw new IllegalArgumentException("Unsupported OTLP protocol: " + protocol);
+        };

         return builder.setInterval(Duration.ofMillis(intervalMs)).build();
     }
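Reviewer note: an equivalent direct construction, matching the post-patch constructor signature; the endpoint address is a placeholder and gzip support is assumed from OTLPCompressionType.

OTLPMetricsExporter exporter = new OTLPMetricsExporter(
    60_000L,                 // export interval in milliseconds
    "http://collector:4317", // OTLP endpoint (placeholder)
    "grpc",                  // resolved via OTLPProtocol.fromString
    "gzip");                 // resolved via OTLPCompressionType.fromString
MetricReader reader = exporter.asMetricReader(); // periodic reader with the 30s default export timeout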
@@ -0,0 +1,68 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter;

import com.automq.opentelemetry.TelemetryConstants;

import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import io.opentelemetry.exporter.prometheus.PrometheusHttpServer;
import io.opentelemetry.sdk.metrics.export.MetricReader;

public class PrometheusMetricsExporter implements MetricsExporter {
    private static final Logger LOGGER = LoggerFactory.getLogger(PrometheusMetricsExporter.class);
    private final String host;
    private final int port;
    private final Set<String> baseLabelKeys;

    public PrometheusMetricsExporter(String host, int port, List<Pair<String, String>> baseLabels) {
        if (host == null || host.isEmpty()) {
            throw new IllegalArgumentException("Illegal Prometheus host");
        }
        if (port <= 0) {
            throw new IllegalArgumentException("Illegal Prometheus port");
        }
        this.host = host;
        this.port = port;
        this.baseLabelKeys = baseLabels.stream().map(Pair::getKey).collect(Collectors.toSet());
        LOGGER.info("PrometheusMetricsExporter initialized with host: {}, port: {}, labels: {}", host, port, baseLabels);
    }

    @Override
    public MetricReader asMetricReader() {
        return PrometheusHttpServer.builder()
            .setHost(host)
            .setPort(port)
            // This filter is to align with the original behavior, allowing only specific resource attributes
            // to be converted to prometheus labels.
            .setAllowedResourceAttributesFilter(resourceAttributeKey ->
                TelemetryConstants.PROMETHEUS_JOB_KEY.equals(resourceAttributeKey)
                    || TelemetryConstants.PROMETHEUS_INSTANCE_KEY.equals(resourceAttributeKey)
                    || TelemetryConstants.HOST_NAME_KEY.equals(resourceAttributeKey)
                    || baseLabelKeys.contains(resourceAttributeKey))
            .build();
    }
}
@@ -0,0 +1,86 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter.s3;

import com.automq.stream.s3.ByteBufAlloc;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import io.netty.buffer.ByteBuf;

/**
 * Utility class for data compression and decompression.
 */
public class CompressionUtils {

    /**
     * Compress a ByteBuf using GZIP.
     *
     * @param input The input ByteBuf to compress.
     * @return A new ByteBuf containing the compressed data.
     * @throws IOException If an I/O error occurs during compression.
     */
    public static ByteBuf compress(ByteBuf input) throws IOException {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);

        byte[] buffer = new byte[input.readableBytes()];
        input.readBytes(buffer);
        gzipOutputStream.write(buffer);
        gzipOutputStream.close();

        ByteBuf compressed = ByteBufAlloc.byteBuffer(byteArrayOutputStream.size());
        compressed.writeBytes(byteArrayOutputStream.toByteArray());
        return compressed;
    }

    /**
     * Decompress a GZIP-compressed ByteBuf.
     *
     * @param input The compressed ByteBuf to decompress.
     * @return A new ByteBuf containing the decompressed data.
     * @throws IOException If an I/O error occurs during decompression.
     */
    public static ByteBuf decompress(ByteBuf input) throws IOException {
        byte[] compressedData = new byte[input.readableBytes()];
        input.readBytes(compressedData);
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);
        GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);

        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int bytesRead;
        while ((bytesRead = gzipInputStream.read(buffer)) != -1) {
            byteArrayOutputStream.write(buffer, 0, bytesRead);
        }

        gzipInputStream.close();
        byteArrayOutputStream.close();

        byte[] uncompressedData = byteArrayOutputStream.toByteArray();
        ByteBuf output = ByteBufAlloc.byteBuffer(uncompressedData.length);
        output.writeBytes(uncompressedData);
        return output;
    }
}
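Reviewer note: a round-trip sketch of the helpers above. duplicate() is used because compress/decompress consume the reader index of their input; buffer release is omitted for brevity, and the imports (io.netty.buffer.Unpooled, java.nio.charset.StandardCharsets) are standard.

ByteBuf original = Unpooled.wrappedBuffer("hello metrics".getBytes(StandardCharsets.UTF_8));
ByteBuf packed = CompressionUtils.compress(original.duplicate());   // GZIP-compressed copy
ByteBuf unpacked = CompressionUtils.decompress(packed.duplicate()); // restores the original bytes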
@@ -0,0 +1,276 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter.s3;

import org.apache.commons.lang3.StringUtils;

import java.util.Locale;

/**
 * Utility class for Prometheus metric and label naming.
 */
public class PrometheusUtils {
    private static final String TOTAL_SUFFIX = "_total";

    /**
     * Get the Prometheus unit from the OpenTelemetry unit.
     *
     * @param unit The OpenTelemetry unit.
     * @return The Prometheus unit.
     */
    public static String getPrometheusUnit(String unit) {
        if (unit.contains("{")) {
            return "";
        }
        switch (unit) {
            // Time
            case "d":
                return "days";
            case "h":
                return "hours";
            case "min":
                return "minutes";
            case "s":
                return "seconds";
            case "ms":
                return "milliseconds";
            case "us":
                return "microseconds";
            case "ns":
                return "nanoseconds";
            // Bytes
            case "By":
                return "bytes";
            case "KiBy":
                return "kibibytes";
            case "MiBy":
                return "mebibytes";
            case "GiBy":
                return "gibibytes";
            case "TiBy":
                return "tibibytes";
            case "KBy":
                return "kilobytes";
            case "MBy":
                return "megabytes";
            case "GBy":
                return "gigabytes";
            case "TBy":
                return "terabytes";
            // SI
            case "m":
                return "meters";
            case "V":
                return "volts";
            case "A":
                return "amperes";
            case "J":
                return "joules";
            case "W":
                return "watts";
            case "g":
                return "grams";
            // Misc
            case "Cel":
                return "celsius";
            case "Hz":
                return "hertz";
            case "1":
                return "";
            case "%":
                return "percent";
            // Rate units (per second)
            case "1/s":
                return "per_second";
            case "By/s":
                return "bytes_per_second";
            case "KiBy/s":
                return "kibibytes_per_second";
            case "MiBy/s":
                return "mebibytes_per_second";
            case "GiBy/s":
                return "gibibytes_per_second";
            case "KBy/s":
                return "kilobytes_per_second";
            case "MBy/s":
                return "megabytes_per_second";
            case "GBy/s":
                return "gigabytes_per_second";
            // Rate units (per minute)
            case "1/min":
                return "per_minute";
            case "By/min":
                return "bytes_per_minute";
            // Rate units (per hour)
            case "1/h":
                return "per_hour";
            case "By/h":
                return "bytes_per_hour";
            // Rate units (per day)
            case "1/d":
                return "per_day";
            case "By/d":
                return "bytes_per_day";
            default:
                return unit;
        }
    }

    /**
     * Map a metric name to a Prometheus-compatible name.
     *
     * @param name The original metric name.
     * @param unit The metric unit.
     * @param isCounter Whether the metric is a counter.
     * @param isGauge Whether the metric is a gauge.
     * @return The Prometheus-compatible metric name.
     */
    public static String mapMetricsName(String name, String unit, boolean isCounter, boolean isGauge) {
        // Replace "." with "_"
        name = name.replaceAll("\\.", "_");

        String prometheusUnit = getPrometheusUnit(unit);
        boolean shouldAppendUnit = StringUtils.isNotBlank(prometheusUnit) && !name.contains(prometheusUnit);

        // Append the Prometheus unit if it is not null or empty;
        // the unit must be appended before the type suffix.
        if (shouldAppendUnit) {
            name = name + "_" + prometheusUnit;
        }

        // Trim a counter's existing _total suffix so the unit is placed before it.
        if (isCounter && name.endsWith(TOTAL_SUFFIX)) {
            name = name.substring(0, name.length() - TOTAL_SUFFIX.length());
        }

        // Re-add the _total suffix, or add it if it wasn't already present.
        if (isCounter) {
            name = name + TOTAL_SUFFIX;
        }

        // Special case - gauge with intelligent Connect metric handling
        if ("1".equals(unit) && isGauge && !name.contains("ratio")) {
            if (isConnectMetric(name)) {
                // For Connect metrics, use improved logic to avoid a misleading _ratio suffix
                if (shouldAddRatioSuffixForConnect(name)) {
                    name = name + "_ratio";
                }
            } else {
                // For other metrics, maintain the original behavior
                name = name + "_ratio";
            }
        }
        return name;
    }

    /**
     * Map a label name to a Prometheus-compatible name.
     *
     * @param name The original label name.
     * @return The Prometheus-compatible label name.
     */
    public static String mapLabelName(String name) {
        if (StringUtils.isBlank(name)) {
            return "";
        }
        return name.replaceAll("\\.", "_");
    }

    /**
     * Check if a metric name is related to Kafka Connect.
     *
     * @param name The metric name to check.
     * @return true if it's a Connect metric, false otherwise.
     */
    private static boolean isConnectMetric(String name) {
        String lowerName = name.toLowerCase(Locale.ROOT);
        return lowerName.contains("kafka_connector_") ||
            lowerName.contains("kafka_task_") ||
            lowerName.contains("kafka_worker_") ||
            lowerName.contains("kafka_connect_") ||
            lowerName.contains("kafka_source_task_") ||
            lowerName.contains("kafka_sink_task_") ||
            lowerName.contains("connector_metrics") ||
            lowerName.contains("task_metrics") ||
            lowerName.contains("worker_metrics") ||
            lowerName.contains("source_task_metrics") ||
            lowerName.contains("sink_task_metrics");
    }

    /**
     * Intelligently determine if a Connect metric should have a _ratio suffix.
     * This method avoids adding misleading _ratio suffixes to count-based metrics.
     *
     * @param name The metric name to check.
     * @return true if the _ratio suffix should be added, false otherwise.
     */
    private static boolean shouldAddRatioSuffixForConnect(String name) {
        String lowerName = name.toLowerCase(Locale.ROOT);

        if (hasRatioRelatedWords(lowerName)) {
            return false;
        }

        if (isCountMetric(lowerName)) {
            return false;
        }

        return isRatioMetric(lowerName);
    }

    private static boolean hasRatioRelatedWords(String lowerName) {
        return lowerName.contains("ratio") || lowerName.contains("percent") ||
            lowerName.contains("rate") || lowerName.contains("fraction");
    }

    private static boolean isCountMetric(String lowerName) {
        return hasBasicCountKeywords(lowerName) || hasConnectCountKeywords(lowerName) ||
            hasStatusCountKeywords(lowerName);
    }

    private static boolean hasBasicCountKeywords(String lowerName) {
        return lowerName.contains("count") || lowerName.contains("num") ||
            lowerName.contains("size") || lowerName.contains("total") ||
            lowerName.contains("active") || lowerName.contains("current");
    }

    private static boolean hasConnectCountKeywords(String lowerName) {
        return lowerName.contains("partition") || lowerName.contains("task") ||
            lowerName.contains("connector") || lowerName.contains("seq_no") ||
            lowerName.contains("seq_num") || lowerName.contains("attempts");
    }

    private static boolean hasStatusCountKeywords(String lowerName) {
        return lowerName.contains("success") || lowerName.contains("failure") ||
            lowerName.contains("errors") || lowerName.contains("retries") ||
            lowerName.contains("skipped") || lowerName.contains("running") ||
            lowerName.contains("paused") || lowerName.contains("failed") ||
            lowerName.contains("destroyed");
    }

    private static boolean isRatioMetric(String lowerName) {
        return lowerName.contains("utilization") ||
            lowerName.contains("usage") ||
            lowerName.contains("load") ||
            lowerName.contains("efficiency") ||
            lowerName.contains("hit_rate") ||
            lowerName.contains("miss_rate");
    }
}
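Reviewer note: worked examples of the naming rules above, each traced through getPrometheusUnit and the suffix logic.

PrometheusUtils.mapMetricsName("kafka.request.time", "ms", false, false);
// -> "kafka_request_time_milliseconds"  (dots replaced, unit appended)
PrometheusUtils.mapMetricsName("kafka.messages.in", "1", true, false);
// -> "kafka_messages_in_total"          (unit "1" maps to "", counter gains _total)
PrometheusUtils.mapMetricsName("disk.usage", "1", false, true);
// -> "disk_usage_ratio"                 (non-Connect unit-less gauge keeps the original _ratio rule)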
@@ -1,20 +1,30 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

-package com.automq.shell.metrics;
+package com.automq.opentelemetry.exporter.s3;

+import com.automq.opentelemetry.exporter.MetricsExportConfig;
 import com.automq.stream.s3.operator.ObjectStorage;
 import com.automq.stream.s3.operator.ObjectStorage.ObjectInfo;
 import com.automq.stream.s3.operator.ObjectStorage.ObjectPath;
 import com.automq.stream.s3.operator.ObjectStorage.WriteOptions;
+import com.automq.stream.utils.Threads;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -50,6 +60,9 @@ import io.opentelemetry.sdk.metrics.data.HistogramPointData;
 import io.opentelemetry.sdk.metrics.data.MetricData;
 import io.opentelemetry.sdk.metrics.export.MetricExporter;

+/**
+ * An S3 metrics exporter that uploads metrics data to S3 buckets.
+ */
 public class S3MetricsExporter implements MetricExporter {
     private static final Logger LOGGER = LoggerFactory.getLogger(S3MetricsExporter.class);

@@ -58,13 +71,13 @@ public class S3MetricsExporter implements MetricExporter {
     public static final int MAX_JITTER_INTERVAL = 60 * 1000;
     public static final int DEFAULT_BUFFER_SIZE = 16 * 1024 * 1024;

-    private final S3MetricsConfig config;
+    private final MetricsExportConfig config;
     private final Map<String, String> defaultTagMap = new HashMap<>();

     private final ByteBuf uploadBuffer = Unpooled.directBuffer(DEFAULT_BUFFER_SIZE);
-    private final Random random = new Random();
+    private static final Random RANDOM = new Random();
     private volatile long lastUploadTimestamp = System.currentTimeMillis();
-    private volatile long nextUploadInterval = UPLOAD_INTERVAL + random.nextInt(MAX_JITTER_INTERVAL);
+    private volatile long nextUploadInterval = UPLOAD_INTERVAL + RANDOM.nextInt(MAX_JITTER_INTERVAL);

     private final ObjectStorage objectStorage;
     private final ObjectMapper objectMapper = new ObjectMapper();
@@ -73,7 +86,12 @@ public class S3MetricsExporter implements MetricExporter {
     private final Thread uploadThread;
     private final Thread cleanupThread;

-    public S3MetricsExporter(S3MetricsConfig config) {
+    /**
+     * Creates a new S3MetricsExporter.
+     *
+     * @param config The configuration for the S3 metrics exporter.
+     */
+    public S3MetricsExporter(MetricsExportConfig config) {
         this.config = config;
         this.objectStorage = config.objectStorage();

@@ -91,6 +109,9 @@ public class S3MetricsExporter implements MetricExporter {
         cleanupThread.setDaemon(true);
     }

+    /**
+     * Starts the exporter threads.
+     */
     public void start() {
         uploadThread.start();
         cleanupThread.start();
@@ -129,7 +150,7 @@ public class S3MetricsExporter implements MetricExporter {
         public void run() {
             while (!Thread.currentThread().isInterrupted()) {
                 try {
-                    if (closed || !config.isActiveController()) {
+                    if (closed || !config.isLeader()) {
                         Thread.sleep(Duration.ofMinutes(1).toMillis());
                         continue;
                     }
@@ -152,8 +173,7 @@ public class S3MetricsExporter implements MetricExporter {
                             CompletableFuture.allOf(deleteFutures).join();
                         }
                     }
-
-                    Thread.sleep(Duration.ofMinutes(1).toMillis());
+                    Threads.sleep(Duration.ofMinutes(1).toMillis());
                 } catch (InterruptedException e) {
                     break;
                 } catch (Exception e) {
@@ -242,13 +262,13 @@ public class S3MetricsExporter implements MetricExporter {
         synchronized (uploadBuffer) {
             if (uploadBuffer.readableBytes() > 0) {
                 try {
-                    objectStorage.write(WriteOptions.DEFAULT, getObjectKey(), uploadBuffer.retainedSlice().asReadOnly()).get();
+                    objectStorage.write(WriteOptions.DEFAULT, getObjectKey(), CompressionUtils.compress(uploadBuffer.slice().asReadOnly())).get();
                 } catch (Exception e) {
                     LOGGER.error("Failed to upload metrics to s3", e);
                     return CompletableResultCode.ofFailure();
                 } finally {
                     lastUploadTimestamp = System.currentTimeMillis();
-                    nextUploadInterval = UPLOAD_INTERVAL + random.nextInt(MAX_JITTER_INTERVAL);
+                    nextUploadInterval = UPLOAD_INTERVAL + RANDOM.nextInt(MAX_JITTER_INTERVAL);
                     uploadBuffer.clear();
                 }
             }
@@ -0,0 +1,63 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.exporter.s3;

import com.automq.opentelemetry.exporter.MetricsExportConfig;
import com.automq.opentelemetry.exporter.MetricsExporter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;

import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader;

/**
 * An adapter class that implements the MetricsExporter interface and uses S3MetricsExporter
 * for the actual metrics exporting functionality.
 */
public class S3MetricsExporterAdapter implements MetricsExporter {
    private static final Logger LOGGER = LoggerFactory.getLogger(S3MetricsExporterAdapter.class);

    private final MetricsExportConfig metricsExportConfig;

    /**
     * Creates a new S3MetricsExporterAdapter.
     *
     * @param metricsExportConfig The configuration for the S3 metrics exporter.
     */
    public S3MetricsExporterAdapter(MetricsExportConfig metricsExportConfig) {
        this.metricsExportConfig = metricsExportConfig;
        LOGGER.info("S3MetricsExporterAdapter initialized with labels: {}", metricsExportConfig.baseLabels());
    }

    @Override
    public MetricReader asMetricReader() {
        // Create and start the S3MetricsExporter
        S3MetricsExporter s3MetricsExporter = new S3MetricsExporter(metricsExportConfig);
        s3MetricsExporter.start();

        // Create and return the periodic metric reader
        return PeriodicMetricReader.builder(s3MetricsExporter)
            .setInterval(Duration.ofMillis(metricsExportConfig.intervalMs()))
            .build();
    }
}
@@ -1,15 +1,23 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

-package kafka.log.stream.s3.telemetry.otel;
+package com.automq.opentelemetry.yammer;

 import com.yammer.metrics.core.Histogram;
 import com.yammer.metrics.core.Timer;
@@ -1,15 +1,23 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

-package kafka.log.stream.s3.telemetry.otel;
+package com.automq.opentelemetry.yammer;

 import com.yammer.metrics.core.MetricName;
@@ -1,17 +1,24 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

-package kafka.log.stream.s3.telemetry.otel;
+package com.automq.opentelemetry.yammer;

-import kafka.autobalancer.metricsreporter.metric.MetricsUtils;

 import com.yammer.metrics.core.Counter;
 import com.yammer.metrics.core.Gauge;
@@ -24,16 +31,54 @@ import com.yammer.metrics.core.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;

 import io.opentelemetry.api.common.Attributes;
 import io.opentelemetry.api.common.AttributesBuilder;
 import io.opentelemetry.api.metrics.Meter;
-import scala.UninitializedFieldError;

-public class OTelMetricsProcessor implements MetricProcessor<Void> {
-    private static final Logger LOGGER = LoggerFactory.getLogger(OTelMetricsProcessor.class);
+/**
+ * A metrics processor that bridges Yammer metrics to OpenTelemetry metrics.
+ *
+ * <p>This processor specifically handles Histogram and Timer metrics from the Yammer metrics
+ * library and converts them to OpenTelemetry gauge metrics that track delta mean values.
+ * It implements the Yammer {@link MetricProcessor} interface to process metrics and creates
+ * corresponding OpenTelemetry metrics with proper attributes derived from the metric scope.
+ *
+ * <p>The processor:
+ * <ul>
+ *   <li>Converts Yammer Histogram and Timer metrics to OpenTelemetry gauges</li>
+ *   <li>Calculates delta mean values using {@link DeltaHistogram}</li>
+ *   <li>Parses metric scopes to extract attributes for OpenTelemetry metrics</li>
+ *   <li>Maintains a registry of processed metrics for lifecycle management</li>
+ *   <li>Supports metric removal when metrics are no longer needed</li>
+ * </ul>
+ *
+ * <p>Supported metric types:
+ * <ul>
+ *   <li>{@link Histogram} - Converted to delta mean gauge</li>
+ *   <li>{@link Timer} - Converted to delta mean gauge</li>
+ * </ul>
+ *
+ * <p>Unsupported metric types (will throw {@link UnsupportedOperationException}):
+ * <ul>
+ *   <li>{@link Counter}</li>
+ *   <li>{@link Gauge}</li>
+ *   <li>{@link Metered}</li>
+ * </ul>
+ *
+ * <p>Thread Safety: This class is thread-safe and uses concurrent data structures
+ * to handle metrics registration and removal from multiple threads.
+ *
+ * @see MetricProcessor
+ * @see DeltaHistogram
+ * @see OTelMetricUtils
+ */
+public class YammerMetricsProcessor implements MetricProcessor<Void> {
+    private static final Logger LOGGER = LoggerFactory.getLogger(YammerMetricsProcessor.class);
     private final Map<String, Map<MetricName, MetricWrapper>> metrics = new ConcurrentHashMap<>();
     private Meter meter = null;
@@ -63,9 +108,9 @@ public class OTelMetricsProcessor implements MetricProcessor<Void> {

     private void processDeltaHistogramMetric(MetricName name, DeltaHistogram deltaHistogram) {
         if (meter == null) {
-            throw new UninitializedFieldError("Meter is not initialized");
+            throw new IllegalStateException("Meter is not initialized");
         }
-        Map<String, String> tags = MetricsUtils.yammerMetricScopeToTags(name.getScope());
+        Map<String, String> tags = yammerMetricScopeToTags(name.getScope());
         AttributesBuilder attrBuilder = Attributes.builder();
         if (tags != null) {
             String value = tags.remove(OTelMetricUtils.REQUEST_TAG_KEY);
@@ -108,6 +153,29 @@ public class OTelMetricsProcessor implements MetricProcessor<Void> {
         });
     }

+    /**
+     * Convert a yammer metrics scope to a tags map.
+     *
+     * @param scope Scope of the Yammer metric.
+     * @return Empty map for {@code null} scope, {@code null} for scope with keys without a matching value (i.e. unacceptable
+     * scope) (see <a href="https://github.com/linkedin/cruise-control/issues/1296">...</a>), parsed tags otherwise.
+     */
+    public static Map<String, String> yammerMetricScopeToTags(String scope) {
+        if (scope != null) {
+            String[] kv = scope.split("\\.");
+            if (kv.length % 2 != 0) {
+                return null;
+            }
+            Map<String, String> tags = new HashMap<>();
+            for (int i = 0; i < kv.length; i += 2) {
+                tags.put(kv[i], kv[i + 1]);
+            }
+            return tags;
+        } else {
+            return Collections.emptyMap();
+        }
+    }
+
     static class MetricWrapper {
         private final Attributes attr;
         private final DeltaHistogram deltaHistogram;
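Reviewer note: the parsing contract in the javadoc, traced on concrete scopes.

yammerMetricScopeToTags("broker.1.topic.foo"); // {broker=1, topic=foo}
yammerMetricScopeToTags(null);                 // empty map
yammerMetricScopeToTags("clientId");           // null: odd token count, unacceptable scope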
@@ -0,0 +1,93 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.automq.opentelemetry.yammer;

import com.yammer.metrics.core.Metric;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.core.MetricsRegistryListener;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Closeable;
import java.io.IOException;

import io.opentelemetry.api.metrics.Meter;

/**
 * A listener that bridges Yammer Histogram metrics to OpenTelemetry.
 * It listens for new metrics added to a MetricsRegistry and creates corresponding
 * OTel gauge metrics for mean and max values of histograms.
 */
public class YammerMetricsReporter implements MetricsRegistryListener, Closeable {
    private static final Logger LOGGER = LoggerFactory.getLogger(YammerMetricsReporter.class);
    private final MetricsRegistry metricsRegistry;
    private final YammerMetricsProcessor metricsProcessor;
    private volatile Meter meter;

    public YammerMetricsReporter(MetricsRegistry metricsRegistry) {
        this.metricsRegistry = metricsRegistry;
        this.metricsProcessor = new YammerMetricsProcessor();
    }

    public void start(Meter meter) {
        this.meter = meter;
        this.metricsProcessor.init(meter);
        metricsRegistry.addListener(this);
        LOGGER.info("YammerMetricsReporter started");
    }

    @Override
    public void onMetricAdded(MetricName name, Metric metric) {
        if (OTelMetricUtils.isInterestedMetric(name)) {
            if (this.meter == null) {
                LOGGER.info("Not initialized yet, skipping metric: {}", name);
                return;
            }
            try {
                metric.processWith(this.metricsProcessor, name, null);
            } catch (Throwable t) {
                LOGGER.error("Failed to process metric: {}", name, t);
            }
        }
    }

    @Override
    public void onMetricRemoved(MetricName name) {
        try {
            this.metricsProcessor.remove(name);
        } catch (Throwable ignored) {
            // best-effort removal; the metric may never have been registered
        }
    }

    @Override
    public void close() throws IOException {
        try {
            // Remove this reporter as a listener from the metrics registry
            metricsRegistry.removeListener(this);
            LOGGER.info("YammerMetricsReporter stopped and removed from metrics registry");
        } catch (Exception e) {
            LOGGER.error("Error while closing YammerMetricsReporter", e);
            throw new IOException("Failed to close YammerMetricsReporter", e);
        }
    }
}
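Reviewer note: a wiring sketch under stated assumptions — meter is an io.opentelemetry.api.metrics.Meter obtained from the SDK elsewhere, and the registry could equally be Kafka's shared Yammer registry.

MetricsRegistry registry = new MetricsRegistry();
YammerMetricsReporter reporter = new YammerMetricsReporter(registry);
reporter.start(meter);  // registers the listener; interesting histograms/timers get bridged
// ... on shutdown:
reporter.close();       // removes the listener from the registry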
@@ -18,7 +18,8 @@ dependencies {
     compileOnly libs.awsSdkAuth
     implementation libs.reload4j
     implementation libs.nettyBuffer
-    implementation libs.opentelemetrySdk
+    implementation project(':automq-metrics')
+    implementation project(':automq-log-uploader')
     implementation libs.jacksonDatabind
     implementation libs.jacksonYaml
     implementation libs.commonLang
@@ -65,4 +66,4 @@ jar {
     manifest {
         attributes 'Main-Class': 'com.automq.shell.AutoMQCLI'
     }
-}
+}
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

 package com.automq.shell;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */

 package com.automq.shell;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.commands.cluster;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.commands.cluster;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.commands.cluster;
@@ -102,9 +110,11 @@ public class Deploy implements Callable<Integer> {
         String globalAccessKey = null;
         String globalSecretKey = null;
         for (Env env : topo.getGlobal().getEnvs()) {
-            if ("KAFKA_S3_ACCESS_KEY".equals(env.getName())) {
+            if ("KAFKA_S3_ACCESS_KEY".equals(env.getName()) ||
+                "AWS_ACCESS_KEY_ID".equals(env.getName())) {
                 globalAccessKey = env.getValue();
-            } else if ("KAFKA_S3_SECRET_KEY".equals(env.getName())) {
+            } else if ("KAFKA_S3_SECRET_KEY".equals(env.getName()) ||
+                "AWS_SECRET_ACCESS_KEY".equals(env.getName())) {
                 globalSecretKey = env.getValue();
             }
         }
@@ -159,6 +169,7 @@ public class Deploy implements Callable<Integer> {
         sb.append("--override cluster.id=").append(topo.getGlobal().getClusterId()).append(" ");
         sb.append("--override node.id=").append(node.getNodeId()).append(" ");
         sb.append("--override controller.quorum.voters=").append(getQuorumVoters(topo)).append(" ");
+        sb.append("--override controller.quorum.bootstrap.servers=").append(getBootstrapServers(topo)).append(" ");
         sb.append("--override advertised.listeners=").append("PLAINTEXT://").append(node.getHost()).append(":9092").append(" ");
     }
@@ -181,4 +192,14 @@ public class Deploy implements Callable<Integer> {
             .map(node -> node.getNodeId() + "@" + node.getHost() + ":9093")
             .collect(Collectors.joining(","));
     }
+
+    private static String getBootstrapServers(ClusterTopology topo) {
+        List<Node> nodes = topo.getControllers();
+        if (!(nodes.size() == 1 || nodes.size() == 3)) {
+            throw new IllegalArgumentException("Only support 1 or 3 controllers");
+        }
+        return nodes.stream()
+            .map(node -> node.getHost() + ":9093")
+            .collect(Collectors.joining(","));
+    }
 }
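The two helpers above differ only in whether the node id prefixes each endpoint: quorum voters are "<id>@<host>:9093", bootstrap servers are plain "<host>:9093". A minimal standalone sketch of the two formats (hosts and ids below are hypothetical, not taken from this diff):

```java
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;

public class ControllerAddressDemo {
    public static void main(String[] args) {
        // Hypothetical 3-controller topology: node id -> host.
        Map<Integer, String> controllers =
            new TreeMap<>(Map.of(0, "10.0.0.1", 1, "10.0.0.2", 2, "10.0.0.3"));

        // controller.quorum.voters entries carry the node id: "<id>@<host>:9093"
        String quorumVoters = controllers.entrySet().stream()
            .map(e -> e.getKey() + "@" + e.getValue() + ":9093")
            .collect(Collectors.joining(","));

        // controller.quorum.bootstrap.servers entries are plain "<host>:9093"
        String bootstrapServers = controllers.values().stream()
            .map(host -> host + ":9093")
            .collect(Collectors.joining(","));

        System.out.println(quorumVoters);     // 0@10.0.0.1:9093,1@10.0.0.2:9093,2@10.0.0.3:9093
        System.out.println(bootstrapServers); // 10.0.0.1:9093,10.0.0.2:9093,10.0.0.3:9093
    }
}
```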
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.commands.cluster;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.constant;
@@ -1,27 +0,0 @@
-/*
- * Copyright 2024, AutoMQ HK Limited.
- *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
- *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
- */
-
-package com.automq.shell.log;
-
-import com.automq.stream.s3.operator.ObjectStorage;
-
-public interface S3LogConfig {
-
-    boolean isEnabled();
-
-    boolean isActiveController();
-
-    String clusterId();
-
-    int nodeId();
-
-    ObjectStorage objectStorage();
-}
@@ -1,42 +0,0 @@
-/*
- * Copyright 2024, AutoMQ HK Limited.
- *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
- *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
- */
-
-package com.automq.shell.log;
-
-import org.apache.log4j.RollingFileAppender;
-import org.apache.log4j.spi.LoggingEvent;
-
-public class S3RollingFileAppender extends RollingFileAppender {
-    private final LogUploader logUploader = LogUploader.getInstance();
-
-    @Override
-    protected void subAppend(LoggingEvent event) {
-        super.subAppend(event);
-        if (!closed) {
-            LogRecorder.LogEvent logEvent = new LogRecorder.LogEvent(
-                event.getTimeStamp(),
-                event.getLevel().toString(),
-                event.getLoggerName(),
-                event.getRenderedMessage(),
-                event.getThrowableStrRep());
-
-            try {
-                logEvent.validate();
-            } catch (IllegalArgumentException e) {
-                // Drop invalid log event
-                errorHandler.error("Failed to validate log event", e, 0);
-                return;
-            }
-
-            logUploader.append(logEvent);
-        }
-    }
-}
@@ -1,120 +0,0 @@
-/*
- * Copyright 2024, AutoMQ HK Limited.
- *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
- *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
- */
-
-package com.automq.shell.metrics;
-
-import org.apache.commons.lang3.StringUtils;
-
-public class PrometheusUtils {
-    private static final String TOTAL_SUFFIX = "_total";
-
-    public static String getPrometheusUnit(String unit) {
-        if (unit.contains("{")) {
-            return "";
-        }
-        switch (unit) {
-            // Time
-            case "d":
-                return "days";
-            case "h":
-                return "hours";
-            case "min":
-                return "minutes";
-            case "s":
-                return "seconds";
-            case "ms":
-                return "milliseconds";
-            case "us":
-                return "microseconds";
-            case "ns":
-                return "nanoseconds";
-            // Bytes
-            case "By":
-                return "bytes";
-            case "KiBy":
-                return "kibibytes";
-            case "MiBy":
-                return "mebibytes";
-            case "GiBy":
-                return "gibibytes";
-            case "TiBy":
-                return "tibibytes";
-            case "KBy":
-                return "kilobytes";
-            case "MBy":
-                return "megabytes";
-            case "GBy":
-                return "gigabytes";
-            case "TBy":
-                return "terabytes";
-            // SI
-            case "m":
-                return "meters";
-            case "V":
-                return "volts";
-            case "A":
-                return "amperes";
-            case "J":
-                return "joules";
-            case "W":
-                return "watts";
-            case "g":
-                return "grams";
-            // Misc
-            case "Cel":
-                return "celsius";
-            case "Hz":
-                return "hertz";
-            case "1":
-                return "";
-            case "%":
-                return "percent";
-            default:
-                return unit;
-        }
-    }
-
-    public static String mapMetricsName(String name, String unit, boolean isCounter, boolean isGauge) {
-        // Replace "." into "_"
-        name = name.replaceAll("\\.", "_");
-
-        String prometheusUnit = getPrometheusUnit(unit);
-        boolean shouldAppendUnit = StringUtils.isNotBlank(prometheusUnit) && !name.contains(prometheusUnit);
-
-        // append prometheus unit if not null or empty.
-        // unit should be appended before type suffix
-        if (shouldAppendUnit) {
-            name = name + "_" + prometheusUnit;
-        }
-
-        // trim counter's _total suffix so the unit is placed before it.
-        if (isCounter && name.endsWith(TOTAL_SUFFIX)) {
-            name = name.substring(0, name.length() - TOTAL_SUFFIX.length());
-        }
-
-        // replace _total suffix, or add if it wasn't already present.
-        if (isCounter) {
-            name = name + TOTAL_SUFFIX;
-        }
-        // special case - gauge
-        if (unit.equals("1") && isGauge && !name.contains("ratio")) {
-            name = name + "_ratio";
-        }
-        return name;
-    }
-
-    public static String mapLabelName(String name) {
-        if (StringUtils.isBlank(name)) {
-            return "";
-        }
-        return name.replaceAll("\\.", "_");
-    }
-}
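The deleted helper above encoded the OpenTelemetry-to-Prometheus naming rules: translate the unit, append it before any type suffix, keep counters ending in `_total`, and add `_ratio` to dimensionless gauges. A condensed restatement of those rules, for reference now that the class leaves automq-shell (class and method names below are illustrative):

```java
public class MetricNameDemo {
    // Condensed re-implementation of the removed mapping logic; units arrive
    // pre-translated here (e.g. "By" -> "bytes"), which the full class did via
    // getPrometheusUnit().
    static String map(String name, String unit, boolean isCounter, boolean isGauge) {
        name = name.replace('.', '_');
        if (!unit.isEmpty() && !unit.equals("1") && !name.contains(unit)) {
            name = name + "_" + unit; // the unit goes before the type suffix
        }
        if (isCounter && name.endsWith("_total")) {
            name = name.substring(0, name.length() - "_total".length());
        }
        if (isCounter) {
            name = name + "_total"; // re-append so the suffix always comes last
        }
        if (unit.equals("1") && isGauge && !name.contains("ratio")) {
            name = name + "_ratio"; // dimensionless gauges become ratios
        }
        return name;
    }

    public static void main(String[] args) {
        System.out.println(map("kafka.network.io", "bytes", true, false)); // kafka_network_io_bytes_total
        System.out.println(map("kafka.disk.usage", "1", false, true));     // kafka_disk_usage_ratio
    }
}
```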
@@ -1,31 +0,0 @@
-/*
- * Copyright 2024, AutoMQ HK Limited.
- *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
- *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
- */
-
-package com.automq.shell.metrics;
-
-import com.automq.stream.s3.operator.ObjectStorage;
-
-import org.apache.commons.lang3.tuple.Pair;
-
-import java.util.List;
-
-public interface S3MetricsConfig {
-
-    String clusterId();
-
-    boolean isActiveController();
-
-    int nodeId();
-
-    ObjectStorage objectStorage();
-
-    List<Pair<String, String>> baseLabels();
-}
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.model;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.stream;
@@ -29,7 +37,6 @@ import org.apache.kafka.common.requests.s3.GetKVsRequest;
 import org.apache.kafka.common.requests.s3.PutKVsRequest;
 import org.apache.kafka.common.utils.Time;
 
-import com.automq.shell.metrics.S3MetricsExporter;
 import com.automq.stream.api.KeyValue;
 
 import org.slf4j.Logger;
@@ -40,7 +47,7 @@ import java.util.List;
 import java.util.Objects;
 
 public class ClientKVClient {
-    private static final Logger LOGGER = LoggerFactory.getLogger(S3MetricsExporter.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(ClientKVClient.class);
 
     private final NetworkClient networkClient;
     private final Node bootstrapServer;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.stream;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.util;
@@ -1,12 +1,20 @@
 /*
- * Copyright 2024, AutoMQ HK Limited.
+ * Copyright 2025, AutoMQ HK Limited.
  *
- * The use of this file is governed by the Business Source License,
- * as detailed in the file "/LICENSE.S3Stream" included in this repository.
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * As of the Change Date specified in that file, in accordance with
- * the Business Source License, use of this software will be governed
- * by the Apache License, Version 2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */

 package com.automq.shell.util;
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2025, AutoMQ HK Limited.
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.automq.shell.util;
+
+import com.automq.stream.s3.ByteBufAlloc;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+import io.netty.buffer.ByteBuf;
+
+public class Utils {
+
+    public static ByteBuf compress(ByteBuf input) throws IOException {
+        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+        GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
+
+        byte[] buffer = new byte[input.readableBytes()];
+        input.readBytes(buffer);
+        gzipOutputStream.write(buffer);
+        gzipOutputStream.close();
+
+        ByteBuf compressed = ByteBufAlloc.byteBuffer(byteArrayOutputStream.size());
+        compressed.writeBytes(byteArrayOutputStream.toByteArray());
+        return compressed;
+    }
+
+    public static ByteBuf decompress(ByteBuf input) throws IOException {
+        byte[] compressedData = new byte[input.readableBytes()];
+        input.readBytes(compressedData);
+        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);
+        GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);
+
+        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+        byte[] buffer = new byte[1024];
+        int bytesRead;
+        while ((bytesRead = gzipInputStream.read(buffer)) != -1) {
+            byteArrayOutputStream.write(buffer, 0, bytesRead);
+        }
+
+        gzipInputStream.close();
+        byteArrayOutputStream.close();
+
+        byte[] uncompressedData = byteArrayOutputStream.toByteArray();
+        ByteBuf output = ByteBufAlloc.byteBuffer(uncompressedData.length);
+        output.writeBytes(uncompressedData);
+        return output;
+    }
+}
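One caveat worth flagging on the new helpers: compress and decompress consume the input's readable bytes and hand back buffers allocated via ByteBufAlloc, so the caller owns every release. A minimal round-trip sketch (assumes the automq-shell and Netty jars are on the classpath; error handling elided):

```java
import com.automq.shell.util.Utils;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class GzipRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteBuf raw = Unpooled.copiedBuffer("hello automq", StandardCharsets.UTF_8);
        ByteBuf compressed = Utils.compress(raw);        // raw is fully consumed here
        ByteBuf restored = Utils.decompress(compressed); // compressed is fully consumed here
        System.out.println(restored.toString(StandardCharsets.UTF_8)); // hello automq
        // The buffers are reference-counted; release them once done.
        raw.release();
        compressed.release();
        restored.release();
    }
}
```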
@@ -9,10 +9,12 @@ global:
   config: |
     s3.data.buckets=0@s3://xxx_bucket?region=us-east-1
     s3.ops.buckets=1@s3://xxx_bucket?region=us-east-1
+    s3.wal.path=0@s3://xxx_bucket?region=us-east-1
     log.dirs=/root/kraft-logs
   envs:
-    - name: KAFKA_S3_ACCESS_KEY
+    - name: AWS_ACCESS_KEY_ID
       value: 'xxxxx'
-    - name: KAFKA_S3_SECRET_KEY
+    - name: AWS_SECRET_ACCESS_KEY
       value: 'xxxxx'
 controllers:
   # The controllers default are combined nodes which roles are controller and broker.
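This template switch pairs with the Deploy change earlier in the diff: either the legacy KAFKA_S3_* names or the AWS-standard names now satisfy the credential scan. A self-contained sketch of that dual-name matching (env values are placeholders, as in the template):

```java
import java.util.LinkedHashMap;
import java.util.Map;

public class EnvLookupDemo {
    public static void main(String[] args) {
        // Hypothetical envs block mirroring the updated topology template.
        Map<String, String> envs = new LinkedHashMap<>();
        envs.put("AWS_ACCESS_KEY_ID", "xxxxx");
        envs.put("AWS_SECRET_ACCESS_KEY", "xxxxx");

        String accessKey = null;
        String secretKey = null;
        for (Map.Entry<String, String> env : envs.entrySet()) {
            String name = env.getKey();
            // Accept both the legacy and the AWS-standard variable names.
            if ("KAFKA_S3_ACCESS_KEY".equals(name) || "AWS_ACCESS_KEY_ID".equals(name)) {
                accessKey = env.getValue();
            } else if ("KAFKA_S3_SECRET_KEY".equals(name) || "AWS_SECRET_ACCESS_KEY".equals(name)) {
                secretKey = env.getValue();
            }
        }
        System.out.println(accessKey != null && secretKey != null); // true
    }
}
```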
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2025, AutoMQ HK Limited.
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.automq.shell.util;
+
+import com.automq.stream.s3.ByteBufAlloc;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import io.netty.buffer.ByteBuf;
+
+@Timeout(60)
+@Tag("S3Unit")
+public class UtilsTest {
+
+    @Test
+    public void testCompression() {
+        String testStr = "This is a test string";
+        ByteBuf input = ByteBufAlloc.byteBuffer(testStr.length());
+        input.writeBytes(testStr.getBytes());
+        try {
+            ByteBuf compressed = Utils.compress(input);
+            ByteBuf decompressed = Utils.decompress(compressed);
+            String decompressedStr = decompressed.toString(io.netty.util.CharsetUtil.UTF_8);
+            System.out.printf("Original: %s, Decompressed: %s\n", testStr, decompressedStr);
+            Assertions.assertEquals(testStr, decompressedStr);
+        } catch (Exception e) {
+            Assertions.fail("Exception occurred during compression/decompression: " + e.getMessage());
+        }
+    }
+}
@@ -23,4 +23,10 @@ fi
 if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
     export KAFKA_HEAP_OPTS="-Xmx1024M"
 fi
+# Add additional help info for the new parameter (this won't be displayed directly but documents the change)
+# --consumers-during-catchup: Percentage of consumers to activate during catch-up read (0-100, default: 100)
+#   This allows controlling what percentage of consumer groups are activated during catch-up
+#   reading to better simulate real-world scenarios where only a subset of consumers
+#   experience catch-up reads at the same time.
+
 exec "$(dirname "$0")/kafka-run-class.sh" -name kafkaClient -loggc org.apache.kafka.tools.automq.PerfCommand "$@"
@@ -42,4 +42,5 @@ case $COMMAND in
     ;;
 esac
 
+export KAFKA_CONNECT_MODE=true
 exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectDistributed "$@"
@@ -42,4 +42,5 @@ case $COMMAND in
     ;;
 esac
 
+export KAFKA_CONNECT_MODE=true
 exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectStandalone "$@"
@@ -40,7 +40,23 @@ should_include_file() {
   fi
   file=$1
   if [ -z "$(echo "$file" | grep -E "$regex")" ] ; then
-    return 0
+    # If Connect mode is enabled, apply additional filtering
+    if [ "$KAFKA_CONNECT_MODE" = "true" ]; then
+      # Skip if file doesn't exist
+      [ ! -f "$file" ] && return 1
+
+      # Exclude heavy dependencies that Connect doesn't need
+      case "$file" in
+        *hadoop*) return 1 ;;
+        *hive*) return 1 ;;
+        *iceberg*) return 1 ;;
+        *avro*) return 1 ;;
+        *parquet*) return 1 ;;
+        *) return 0 ;;
+      esac
+    else
+      return 0
+    fi
   else
     return 1
   fi
@@ -47,7 +47,7 @@ if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
 fi
 
 if [ "x$KAFKA_OPTS" = "x" ]; then
-    export KAFKA_OPTS="-Dio.netty.allocator.maxOrder=11"
+    export KAFKA_OPTS="-XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError -Dio.netty.allocator.maxOrder=11"
 fi
 
 EXTRA_ARGS=${EXTRA_ARGS-'-name kafkaServer -loggc'}
build.gradle
@@ -44,7 +44,9 @@ plugins {
   // be dropped from gradle/resources/dependencycheck-suppressions.xml
   id "com.github.spotbugs" version '5.1.3' apply false
   id 'org.scoverage' version '8.0.3' apply false
-  id 'io.github.goooler.shadow' version '8.1.3' apply false
+  // Updating the shadow plugin version to 8.1.1 causes issue with signing and publishing the shadowed
+  // artifacts - see https://github.com/johnrengelman/shadow/issues/901
+  id 'com.github.johnrengelman.shadow' version '8.1.0' apply false
   // Spotless 6.13.0 has issue with Java 21 (see https://github.com/diffplug/spotless/pull/1920), and Spotless 6.14.0+ requires JRE 11
   // We are going to drop JDK8 support. Hence, the spotless is upgrade to newest version and be applied only if the build env is compatible with JDK 11.
   // spotless 6.15.0+ has issue in runtime with JDK8 even through we define it with `apply:false`. see https://github.com/diffplug/spotless/issues/2156 for more details
@@ -53,7 +55,7 @@ plugins {
 
 ext {
   gradleVersion = versions.gradle
-  minJavaVersion = 11
+  minJavaVersion = 17
   buildVersionFileName = "kafka-version.properties"
 
   defaultMaxHeapSize = "2g"
@@ -128,6 +130,9 @@ allprojects {
 
   repositories {
     mavenCentral()
+    maven {
+      url = uri("https://packages.confluent.io/maven/")
+    }
   }
 
   dependencyUpdates {
@@ -147,6 +152,10 @@ allprojects {
   }
 
   configurations.all {
+    // Globally exclude commons-logging and logback to ensure a single logging implementation (reload4j)
+    exclude group: "commons-logging", module: "commons-logging"
+    exclude group: "ch.qos.logback", module: "logback-classic"
+    exclude group: "ch.qos.logback", module: "logback-core"
     // zinc is the Scala incremental compiler, it has a configuration for its own dependencies
     // that are unrelated to the project dependencies, we should not change them
     if (name != "zinc") {
@@ -162,8 +171,8 @@ allprojects {
         // ZooKeeper (potentially older and containing CVEs)
         libs.nettyHandler,
         libs.nettyTransportNativeEpoll,
-        // be explicit about the reload4j version instead of relying on the transitive versions
-        libs.reload4j
+        // be explicit about the reload4j version instead of relying on the transitive versions
+        libs.reload4j
       )
     }
   }
@@ -257,7 +266,10 @@ subprojects {
     options.compilerArgs << "-Xlint:-rawtypes"
     options.compilerArgs << "-Xlint:-serial"
     options.compilerArgs << "-Xlint:-try"
-    options.compilerArgs << "-Werror"
+    // AutoMQ inject start
+    // TODO: remove me, when upgrade to 4.x
+    // options.compilerArgs << "-Werror"
+    // AutoMQ inject start
 
     // --release is the recommended way to select the target release, but it's only supported in Java 9 so we also
     // set --source and --target via `sourceCompatibility` and `targetCompatibility` a couple of lines below
@@ -295,7 +307,7 @@ subprojects {
       if (!shouldPublishWithShadow) {
         from components.java
       } else {
-        apply plugin: 'io.github.goooler.shadow'
+        apply plugin: 'com.github.johnrengelman.shadow'
         project.shadow.component(mavenJava)
 
         // Fix for avoiding inclusion of runtime dependencies marked as 'shadow' in MANIFEST Class-Path.
@@ -728,7 +740,7 @@ subprojects {
   jacoco {
     toolVersion = versions.jacoco
   }
-
+
   jacocoTestReport {
     dependsOn tasks.test
     sourceSets sourceSets.main
@@ -752,8 +764,8 @@ subprojects {
     skipProjects = [ ":jmh-benchmarks", ":trogdor" ]
     skipConfigurations = [ "zinc" ]
   }
-  // the task `removeUnusedImports` is implemented by google-java-format,
-  // and unfortunately the google-java-format version used by spotless 6.14.0 can't work with JDK 21.
+  // the task `removeUnusedImports` is implemented by google-java-format,
+  // and unfortunately the google-java-format version used by spotless 6.14.0 can't work with JDK 21.
   // Hence, we apply spotless tasks only if the env is either JDK11 or JDK17
   if ((JavaVersion.current().isJava11() || (JavaVersion.current() == JavaVersion.VERSION_17))) {
     apply plugin: 'com.diffplug.spotless'
@@ -828,6 +840,13 @@ tasks.create(name: "jarConnect", dependsOn: connectPkgs.collect { it + ":jar" }) {}
 
 tasks.create(name: "testConnect", dependsOn: connectPkgs.collect { it + ":test" }) {}
 
+// OpenTelemetry related tasks
+tasks.create(name: "jarOpenTelemetry", dependsOn: ":opentelemetry:jar") {}
+
+tasks.create(name: "testOpenTelemetry", dependsOn: ":opentelemetry:test") {}
+
+tasks.create(name: "buildOpenTelemetry", dependsOn: [":opentelemetry:jar", ":opentelemetry:test"]) {}
+
 project(':server') {
   base {
     archivesName = "kafka-server"
@@ -835,6 +854,7 @@ project(':server') {
 
   dependencies {
     implementation project(':clients')
+    implementation project(':metadata')
     implementation project(':server-common')
     implementation project(':storage')
     implementation project(':group-coordinator')
@@ -928,6 +948,8 @@ project(':core') {
     implementation project(':storage')
     implementation project(':server')
     implementation project(':automq-shell')
+    implementation project(':automq-metrics')
+    implementation project(':automq-log-uploader')
 
     implementation libs.argparse4j
     implementation libs.commonsValidator
@@ -944,6 +966,7 @@ project(':core') {
     implementation libs.scalaReflect
     implementation libs.scalaLogging
     implementation libs.slf4jApi
+    implementation libs.commonsIo // ZooKeeper dependency. Do not use, this is going away.
     implementation(libs.zookeeper) {
       // Dropwizard Metrics are required by ZooKeeper as of v3.6.0,
       // but the library should *not* be used in Kafka code
@@ -965,17 +988,77 @@ project(':core') {
     implementation libs.guava
     implementation libs.slf4jBridge
     implementation libs.slf4jReload4j
+    // The `jcl-over-slf4j` library is used to redirect JCL logging to SLF4J.
+    implementation libs.jclOverSlf4j
 
     implementation libs.opentelemetryJava8
     implementation libs.opentelemetryOshi
     implementation libs.opentelemetrySdk
     implementation libs.opentelemetrySdkMetrics
     implementation libs.opentelemetryExporterLogging
     implementation libs.opentelemetryExporterProm
     implementation libs.opentelemetryExporterOTLP
     implementation libs.opentelemetryJmx
     implementation libs.awsSdkAuth
 
+    // table topic start
+    implementation ("org.apache.avro:avro:${versions.avro}")
+    implementation ("org.apache.avro:avro-protobuf:${versions.avro}")
+    implementation('com.google.protobuf:protobuf-java:3.25.5')
+    implementation ("org.apache.iceberg:iceberg-core:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-api:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-data:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-parquet:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-common:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-aws:${versions.iceberg}")
+    implementation ("org.apache.iceberg:iceberg-nessie:${versions.iceberg}")
+    implementation ("software.amazon.awssdk:glue:${versions.awsSdk}")
+    implementation ("software.amazon.awssdk:s3tables:${versions.awsSdk}")
+    implementation 'software.amazon.s3tables:s3-tables-catalog-for-iceberg:0.1.0'
+
+    implementation ('org.apache.hadoop:hadoop-common:3.4.1') {
+        exclude group: 'org.eclipse.jetty', module: '*'
+        exclude group: 'com.sun.jersey', module: '*'
+    }
+    // for hadoop common
+    implementation ("org.eclipse.jetty:jetty-webapp:${versions.jetty}")
+
+    implementation (libs.kafkaAvroSerializer) {
+        exclude group: 'org.apache.kafka', module: 'kafka-clients'
+    }
+
+    // > hive ext start
+    implementation 'org.apache.iceberg:iceberg-hive-metastore:1.6.1'
+    implementation('org.apache.hive:hive-metastore:3.1.3') {
+        // Remove useless dependencies (copy from iceberg-kafka-connect)
+        exclude group: "org.apache.avro", module: "avro"
+        exclude group: "org.slf4j", module: "slf4j-log4j12"
+        exclude group: "org.pentaho" // missing dependency
+        exclude group: "org.apache.hbase"
+        exclude group: "org.apache.logging.log4j"
+        exclude group: "co.cask.tephra"
+        exclude group: "com.google.code.findbugs", module: "jsr305"
+        exclude group: "org.eclipse.jetty.aggregate", module: "jetty-all"
+        exclude group: "org.eclipse.jetty.orbit", module: "javax.servlet"
+        exclude group: "org.apache.parquet", module: "parquet-hadoop-bundle"
+        exclude group: "com.tdunning", module: "json"
+        exclude group: "javax.transaction", module: "transaction-api"
+        exclude group: "com.zaxxer", module: "HikariCP"
+        exclude group: "org.apache.hadoop", module: "hadoop-yarn-server-common"
+        exclude group: "org.apache.hadoop", module: "hadoop-yarn-server-applicationhistoryservice"
+        exclude group: "org.apache.hadoop", module: "hadoop-yarn-server-resourcemanager"
+        exclude group: "org.apache.hadoop", module: "hadoop-yarn-server-web-proxy"
+        exclude group: "org.apache.hive", module: "hive-service-rpc"
+        exclude group: "com.github.joshelser", module: "dropwizard-metrics-hadoop-metrics2-reporter"
+    }
+    implementation ('org.apache.hadoop:hadoop-mapreduce-client-core:3.4.1') {
+        exclude group: 'com.sun.jersey', module: '*'
+        exclude group: 'com.sun.jersey.contribs', module: '*'
+        exclude group: 'com.github.pjfanning', module: 'jersey-json'
+    }
+    // > hive ext end
+
+    // > Protobuf ext start
+    // Wire Runtime for schema handling
+    implementation ("com.squareup.wire:wire-schema:${versions.wire}")
+    implementation ("com.squareup.wire:wire-runtime:${versions.wire}")
+    implementation 'com.google.api.grpc:proto-google-common-protos:2.52.0'
+    // > Protobuf ext end
+
+    // table topic end
+
     implementation(libs.oshi) {
         exclude group: 'org.slf4j', module: '*'
     }
|
|||
testImplementation project(':storage:storage-api').sourceSets.test.output
|
||||
testImplementation project(':server').sourceSets.test.output
|
||||
testImplementation libs.bcpkix
|
||||
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
|
||||
testImplementation libs.mockitoCore
|
||||
testImplementation libs.guava
|
||||
testImplementation(libs.apacheda) {
|
||||
|
|
@@ -1160,7 +1244,6 @@ project(':core') {
     from(project.file("$rootDir/docker/docker-compose.yaml")) { into "docker/" }
     from(project.file("$rootDir/docker/telemetry")) { into "docker/telemetry/" }
     from(project.file("$rootDir/LICENSE")) { into "" }
-    from(project.file("$rootDir/LICENSE.S3Stream")) { into "" }
     from "$rootDir/NOTICE-binary" rename {String filename -> filename.replace("-binary", "")}
     from(configurations.runtimeClasspath) { into("libs/") }
     from(configurations.archives.artifacts.files) { into("libs/") }
@@ -1171,6 +1254,10 @@ project(':core') {
     from(project(':trogdor').configurations.runtimeClasspath) { into("libs/") }
     from(project(':automq-shell').jar) { into("libs/") }
     from(project(':automq-shell').configurations.runtimeClasspath) { into("libs/") }
+    from(project(':automq-metrics').jar) { into("libs/") }
+    from(project(':automq-metrics').configurations.runtimeClasspath) { into("libs/") }
+    from(project(':automq-log-uploader').jar) { into("libs/") }
+    from(project(':automq-log-uploader').configurations.runtimeClasspath) { into("libs/") }
     from(project(':shell').jar) { into("libs/") }
     from(project(':shell').configurations.runtimeClasspath) { into("libs/") }
     from(project(':connect:api').jar) { into("libs/") }
@@ -1201,6 +1288,38 @@ project(':core') {
     from(project(':tools:tools-api').configurations.runtimeClasspath) { into("libs/") }
     duplicatesStrategy 'exclude'
   }
+
+  // AutoMQ inject start
+  tasks.create(name: "releaseE2ETar", dependsOn: [configurations.archives.artifacts, 'copyDependantTestLibs'], type: Tar) {
+    def prefix = project.findProperty('prefix') ?: ''
+    archiveBaseName = "${prefix}kafka"
+
+    into "${prefix}kafka-${archiveVersion.get()}"
+    compression = Compression.GZIP
+    from(project.file("$rootDir/bin")) { into "bin/" }
+    from(project.file("$rootDir/config")) { into "config/" }
+    from(project.file("$rootDir/licenses")) { into "licenses/" }
+    from(project.file("$rootDir/docker/docker-compose.yaml")) { into "docker/" }
+    from(project.file("$rootDir/docker/telemetry")) { into "docker/telemetry/" }
+    from(project.file("$rootDir/LICENSE")) { into "" }
+    from "$rootDir/NOTICE-binary" rename {String filename -> filename.replace("-binary", "")}
+    from(configurations.runtimeClasspath) { into("libs/") }
+    from(configurations.archives.artifacts.files) { into("libs/") }
+    from(project.siteDocsTar) { into("site-docs/") }
+
+    // Include main and test jars from all subprojects
+    rootProject.subprojects.each { subproject ->
+      if (subproject.tasks.findByName('jar')) {
+        from(subproject.tasks.named('jar')) { into('libs/') }
+      }
+      if (subproject.tasks.findByName('testJar')) {
+        from(subproject.tasks.named('testJar')) { into('libs/') }
+      }
+      from(subproject.configurations.runtimeClasspath) { into('libs/') }
+    }
+    duplicatesStrategy 'exclude'
+  }
+  // AutoMQ inject end
 
   jar {
     dependsOn('copyDependantLibs')
@@ -1220,7 +1339,7 @@ project(':core') {
     //By default gradle does not handle test dependencies between the sub-projects
     //This line is to include clients project test jar to dependant-testlibs
     from (project(':clients').testJar ) { "$buildDir/dependant-testlibs" }
-    // log4j-appender is not in core dependencies,
+    // log4j-appender is not in core dependencies,
     // so we add it to dependant-testlibs to avoid ClassNotFoundException in running kafka_log4j_appender.py
     from (project(':log4j-appender').jar ) { "$buildDir/dependant-testlibs" }
     duplicatesStrategy 'exclude'
|
@ -1253,6 +1372,7 @@ project(':core') {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
project(':metadata') {
|
||||
base {
|
||||
archivesName = "kafka-metadata"
|
||||
|
|
@@ -1275,6 +1395,7 @@ project(':metadata') {
     implementation libs.guava
     implementation libs.awsSdkAuth
     implementation project(':s3stream')
+    implementation ("org.apache.avro:avro:${versions.avro}")
 
     implementation libs.jacksonDatabind
     implementation libs.jacksonJDK8Datatypes
@@ -1480,7 +1601,7 @@ project(':transaction-coordinator') {
     implementation project(':clients')
     generator project(':generator')
   }
-
+
   sourceSets {
     main {
       java {
@@ -1571,6 +1692,7 @@ project(':clients') {
     implementation libs.snappy
     implementation libs.slf4jApi
     implementation libs.opentelemetryProto
+    implementation libs.protobuf
 
     // libraries which should be added as runtime dependencies in generated pom.xml should be defined here:
     shadowed libs.zstd
@@ -1755,6 +1877,7 @@ project(':raft') {
     testImplementation libs.junitJupiter
     testImplementation libs.mockitoCore
     testImplementation libs.jqwik
+    testImplementation libs.hamcrest
 
     testRuntimeOnly libs.slf4jReload4j
     testRuntimeOnly libs.junitPlatformLanucher
@@ -1845,7 +1968,12 @@ project(':server-common') {
     implementation libs.jacksonDatabind
     implementation libs.pcollections
     implementation libs.opentelemetrySdk
+
+    // AutoMQ inject start
+    implementation project(':s3stream')
+    implementation libs.commonLang
+    // AutoMQ inject end
 
     testImplementation project(':clients')
     testImplementation project(':clients').sourceSets.test.output
|
@ -2140,11 +2268,12 @@ project(':s3stream') {
|
|||
implementation 'commons-codec:commons-codec:1.17.0'
|
||||
implementation 'org.hdrhistogram:HdrHistogram:2.2.2'
|
||||
implementation 'software.amazon.awssdk.crt:aws-crt:0.30.8'
|
||||
implementation 'com.ibm.async:asyncutil:0.1.0'
|
||||
|
||||
testImplementation 'org.slf4j:slf4j-simple:2.0.9'
|
||||
testImplementation 'org.junit.jupiter:junit-jupiter:5.10.0'
|
||||
testImplementation 'org.mockito:mockito-core:5.5.0'
|
||||
testImplementation 'org.mockito:mockito-junit-jupiter:5.5.0'
|
||||
testImplementation 'org.slf4j:slf4j-simple:1.7.36'
|
||||
testImplementation libs.junitJupiter
|
||||
testImplementation libs.mockitoCore
|
||||
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
|
||||
testImplementation 'org.awaitility:awaitility:4.2.1'
|
||||
}
|
||||
|
||||
|
|
@@ -2211,27 +2340,115 @@ project(':tools:tools-api') {
  }
}

project(':automq-metrics') {
  archivesBaseName = "automq-metrics"

  checkstyle {
    configProperties = checkstyleConfigProperties("import-control-server.xml")
  }

  dependencies {
    // OpenTelemetry core dependencies
    api libs.opentelemetryJava8
    api libs.opentelemetryOshi
    api libs.opentelemetrySdk
    api libs.opentelemetrySdkMetrics
    api libs.opentelemetryExporterLogging
    api libs.opentelemetryExporterProm
    api libs.opentelemetryExporterOTLP
    api libs.opentelemetryJmx

    // Logging dependencies
    api libs.slf4jApi
    api libs.slf4jBridge // add the SLF4J bridge dependency
    api libs.reload4j

    api libs.commonLang

    // Yammer metrics (for integration)
    api 'com.yammer.metrics:metrics-core:2.2.0'

    implementation(project(':s3stream')) {
      exclude(group: 'io.opentelemetry', module: '*')
      exclude(group: 'io.opentelemetry.instrumentation', module: '*')
      exclude(group: 'io.opentelemetry.proto', module: '*')
      exclude(group: 'io.netty', module: 'netty-tcnative-boringssl-static')
      exclude(group: 'com.github.jnr', module: '*')
      exclude(group: 'org.aspectj', module: '*')
      exclude(group: 'net.java.dev.jna', module: '*')
      exclude(group: 'net.sourceforge.argparse4j', module: '*')
      exclude(group: 'com.bucket4j', module: '*')
      exclude(group: 'com.yammer.metrics', module: '*')
      exclude(group: 'com.github.spotbugs', module: '*')
      exclude(group: 'org.apache.kafka.shaded', module: '*')
    }
    implementation libs.nettyBuffer
    implementation libs.jacksonDatabind
    implementation libs.guava
    implementation project(':clients')

    // Test dependencies
    testImplementation libs.junitJupiter
    testImplementation libs.mockitoCore
    testImplementation libs.slf4jReload4j

    testRuntimeOnly libs.junitPlatformLanucher

    implementation('io.opentelemetry:opentelemetry-sdk:1.40.0')
    implementation("io.opentelemetry.semconv:opentelemetry-semconv:1.25.0-alpha")
    implementation("io.opentelemetry.instrumentation:opentelemetry-runtime-telemetry-java8:2.6.0-alpha")
    implementation('com.google.protobuf:protobuf-java:3.25.5')
    implementation('org.xerial.snappy:snappy-java:1.1.10.5')
  }

  clean.doFirst {
    delete "$buildDir/kafka/"
  }

  javadoc {
    enabled = false
  }
}

project(':automq-log-uploader') {
  archivesBaseName = "automq-log-uploader"

  checkstyle {
    configProperties = checkstyleConfigProperties("import-control-server.xml")
  }

  dependencies {
    api project(':s3stream')

    implementation project(':clients')
    implementation libs.reload4j
    implementation libs.slf4jApi
    implementation libs.slf4jBridge
    implementation libs.nettyBuffer
    implementation libs.guava
    implementation libs.commonLang
  }

  javadoc {
    enabled = false
  }
}

project(':tools') {
  base {
    archivesName = "kafka-tools"
  }

  dependencies {
    implementation (project(':clients')) {
      exclude group: 'org.slf4j', module: '*'
    }
    implementation (project(':server-common')) {
      exclude group: 'org.slf4j', module: '*'
    }
    implementation (project(':log4j-appender')) {
      exclude group: 'org.slf4j', module: '*'
    }
    implementation project(':automq-shell')

    implementation project(':clients')
    implementation project(':metadata')
    implementation project(':storage')
    implementation project(':server')
    implementation project(':server-common')
    implementation project(':connect:runtime')
    implementation project(':tools:tools-api')
    implementation project(':transaction-coordinator')
    implementation project(':group-coordinator')
    implementation libs.argparse4j
    implementation libs.jacksonDatabind
    implementation libs.jacksonDataformatCsv
@@ -2243,6 +2460,16 @@ project(':tools') {
    implementation libs.hdrHistogram
    implementation libs.spotbugsAnnotations

    // AutoMQ inject start
    implementation project(':automq-shell')
    implementation libs.guava
    implementation (libs.kafkaAvroSerializer) {
      exclude group: 'org.apache.kafka', module: 'kafka-clients'
    }
    implementation libs.bucket4j
    implementation libs.oshi
    // AutoMQ inject end

    // for SASL/OAUTHBEARER JWT validation
    implementation (libs.jose4j) {
      exclude group: 'org.slf4j', module: '*'
@@ -2279,7 +2506,7 @@ project(':tools') {
    testImplementation project(':connect:runtime')
    testImplementation project(':connect:runtime').sourceSets.test.output
    testImplementation project(':storage:storage-api').sourceSets.main.output
    testImplementation project(':group-coordinator')
    testImplementation project(':storage').sourceSets.test.output
    testImplementation libs.junitJupiter
    testImplementation libs.mockitoCore
    testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
@@ -2577,6 +2804,7 @@ project(':streams') {
      ':streams:upgrade-system-tests-35:test',
      ':streams:upgrade-system-tests-36:test',
      ':streams:upgrade-system-tests-37:test',
      ':streams:upgrade-system-tests-38:test',
      ':streams:examples:test'
    ]
  )
@@ -3076,9 +3304,24 @@ project(':streams:upgrade-system-tests-37') {
  }
}

project(':streams:upgrade-system-tests-38') {
  base {
    archivesName = "kafka-streams-upgrade-system-tests-38"
  }

  dependencies {
    testImplementation libs.kafkaStreams_38
    testRuntimeOnly libs.junitJupiter
  }

  systemTestLibs {
    dependsOn testJar
  }
}

project(':jmh-benchmarks') {

  apply plugin: 'io.github.goooler.shadow'
  apply plugin: 'com.github.johnrengelman.shadow'

  shadowJar {
    archiveBaseName = 'kafka-jmh-benchmarks'
@@ -3308,6 +3551,8 @@ project(':connect:runtime') {
    api project(':clients')
    api project(':connect:json')
    api project(':connect:transforms')
    api project(':automq-metrics')
    api project(':automq-log-uploader')

    implementation libs.slf4jApi
    implementation libs.reload4j

@@ -3316,6 +3561,7 @@ project(':connect:runtime') {
    implementation libs.jacksonJaxrsJsonProvider
    implementation libs.jerseyContainerServlet
    implementation libs.jerseyHk2
    implementation libs.jaxrsApi
    implementation libs.jaxbApi // Jersey dependency that was available in the JDK before Java 9
    implementation libs.activation // Jersey dependency that was available in the JDK before Java 9
    implementation libs.jettyServer
@@ -0,0 +1,62 @@
# AutoMQ

[AutoMQ](https://www.automq.com/) is a cloud-native alternative to Kafka that decouples durability onto cloud storage services like S3: 10x more cost-effective, no cross-AZ traffic cost, autoscaling in seconds, and single-digit-ms latency.
This Helm chart simplifies deploying AutoMQ into your Kubernetes cluster using the self-managed Software model.

## Prerequisites
### Install Helm
Install Helm v3.8.0 or later; see the [Helm quickstart](https://helm.sh/zh/docs/intro/quickstart/), then verify your version:
```shell
helm version
```
### Using the Bitnami Helm repository
AutoMQ is fully compatible with Bitnami's Helm charts, so you can customize your AutoMQ Kubernetes cluster starting from the relevant Bitnami values.yaml.
[Bitnami Helm Charts](https://github.com/bitnami/charts)

## Quickstart
### Set up a Kubernetes Cluster
The quickest way to set up a Kubernetes cluster for installing Bitnami charts is to follow the "Bitnami Get Started" guides for the different services:

[Get Started with Bitnami Charts using the Amazon Elastic Container Service for Kubernetes (EKS)](https://docs.bitnami.com/kubernetes/get-started-eks/)

### Installing AutoMQ with the Bitnami Chart

As an alternative to supplying the configuration parameters as arguments, you can create a supplemental YAML file containing your specific config parameters. Any parameter not set in this file defaults to the value in [values.yaml](values.yaml).

1. Create an empty `automq-values.yaml` file.
2. Edit the file with your specific parameters:

   You can refer to the [demo-values.yaml](/chart/bitnami/demo-values.yaml) we provide, which is based on the Bitnami [values.yaml](https://github.com/bitnami/charts/blob/main/bitnami/kafka/values.yaml) and deploys AutoMQ on AWS across 3 Availability Zones using m7g.xlarge instances (4 vCPUs, 16 GB memory, 156 MiB/s network bandwidth).

   Replace the `${...}` placeholders with your own bucket configuration: ops-bucket, data-bucket, region, endpoint, and access-key/secret-key. For example (bucket name, region, and endpoint are illustrative): `s3.data.buckets=0@s3://my-automq-data?region=us-east-1&endpoint=https://s3.us-east-1.amazonaws.com`.

3. Install or upgrade the AutoMQ Helm chart using your custom YAML file:

   We recommend pinning the Bitnami Kafka chart to a [31.x.x version (31.1.0 ~ 31.5.0)](https://artifacthub.io/packages/helm/bitnami/kafka) via `--version` when installing AutoMQ.

```shell
helm install automq-release oci://registry-1.docker.io/bitnamicharts/kafka -f automq-values.yaml --version 31.5.0 --namespace automq --create-namespace
```

### Upgrading

To upgrade the deployment:

```shell
helm repo update
helm upgrade automq-release oci://registry-1.docker.io/bitnamicharts/kafka -f automq-values.yaml --version 31.5.0 --namespace automq --create-namespace
```

### Uninstalling the Chart

To uninstall/delete the deployment:

```shell
helm uninstall automq-release --namespace automq
```

This command removes all the Kubernetes components associated with the chart and deletes the release.
@@ -0,0 +1,141 @@
global:
  security:
    allowInsecureImages: true
image:
  registry: automqinc
  repository: automq
  tag: 1.6.1-rc0-bitnami
  pullPolicy: Always
extraEnvVars:
  - name: AWS_ACCESS_KEY_ID
    value: "${access-key}"
  - name: AWS_SECRET_ACCESS_KEY
    value: "${secret-key}"
controller:
  replicaCount: 3
  resources:
    requests:
      cpu: "3000m"
      memory: "12Gi"
    limits:
      cpu: "4000m"
      memory: "16Gi"
  heapOpts: -Xmx6g -Xms6g -XX:MaxDirectMemorySize=6g -XX:MetaspaceSize=96m
  extraConfig: |
    elasticstream.enable=true
    autobalancer.client.auth.sasl.mechanism=PLAIN
    autobalancer.client.auth.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="inter_broker_user" password="interbroker-password-placeholder" user_inter_broker_user="interbroker-password-placeholder";
    autobalancer.client.auth.security.protocol=SASL_PLAINTEXT
    autobalancer.client.listener.name=INTERNAL
    s3.wal.cache.size=2147483648
    s3.block.cache.size=1073741824
    s3.stream.allocator.policy=POOLED_DIRECT
    s3.network.baseline.bandwidth=245366784
    # Replace the following with your bucket config
    s3.ops.buckets=1@s3://${ops-bucket}?region=${region}&endpoint=${endpoint}
    s3.data.buckets=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
    s3.wal.path=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
    automq.zonerouter.channels=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
  affinity:
    podAntiAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        - labelSelector:
            matchExpressions:
              - key: app.kubernetes.io/instance
                operator: In
                # your helm release name
                values:
                  - automq-release
              - key: app.kubernetes.io/component
                operator: In
                values:
                  - controller-eligible
                  - broker
          topologyKey: kubernetes.io/hostname
    # --- nodeAffinity recommended ---
    # nodeAffinity:
    #   requiredDuringSchedulingIgnoredDuringExecution:
    #     nodeSelectorTerms:
    #       - matchExpressions:
    #           - key: "${your-node-label-key}"
    #             operator: In
    #             values:
    #               - "${your-node-label-value}"
  topologySpreadConstraints:
    - maxSkew: 1
      topologyKey: topology.kubernetes.io/zone
      whenUnsatisfiable: DoNotSchedule
      labelSelector:
        matchLabels:
          app.kubernetes.io/component: controller-eligible
  tolerations:
    - key: "dedicated"
      operator: "Equal"
      value: "automq"
      effect: "NoSchedule"
  persistence:
    size: 20Gi

broker:
  replicaCount: 3
  resources:
    requests:
      cpu: "3000m"
      memory: "12Gi"
    limits:
      cpu: "4000m"
      memory: "16Gi"
  heapOpts: -Xmx6g -Xms6g -XX:MaxDirectMemorySize=6g -XX:MetaspaceSize=96m
  extraConfig: |
    elasticstream.enable=true
    autobalancer.client.auth.sasl.mechanism=PLAIN
    autobalancer.client.auth.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="inter_broker_user" password="interbroker-password-placeholder" user_inter_broker_user="interbroker-password-placeholder";
    autobalancer.client.auth.security.protocol=SASL_PLAINTEXT
    autobalancer.client.listener.name=INTERNAL
    s3.wal.cache.size=2147483648
    s3.block.cache.size=1073741824
    s3.stream.allocator.policy=POOLED_DIRECT
    s3.network.baseline.bandwidth=245366784
    # Replace the following with your bucket config
    s3.ops.buckets=1@s3://${ops-bucket}?region=${region}&endpoint=${endpoint}
    s3.data.buckets=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
    s3.wal.path=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
    automq.zonerouter.channels=0@s3://${data-bucket}?region=${region}&endpoint=${endpoint}
  affinity:
    podAntiAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        - labelSelector:
            matchExpressions:
              - key: app.kubernetes.io/instance
                operator: In
                # your helm release name
                values:
                  - automq-release
              - key: app.kubernetes.io/component
                operator: In
                values:
                  - controller-eligible
                  - broker
          topologyKey: kubernetes.io/hostname
    # --- nodeAffinity recommended ---
    # nodeAffinity:
    #   requiredDuringSchedulingIgnoredDuringExecution:
    #     nodeSelectorTerms:
    #       - matchExpressions:
    #           - key: "${your-node-label-key}"
    #             operator: In
    #             values:
    #               - "${your-node-label-value}"
  topologySpreadConstraints:
    - maxSkew: 1
      topologyKey: topology.kubernetes.io/zone
      whenUnsatisfiable: DoNotSchedule
      labelSelector:
        matchLabels:
          app.kubernetes.io/component: broker
  tolerations:
    - key: "dedicated"
      operator: "Equal"
      value: "automq"
      effect: "NoSchedule"
brokerRackAssignment: aws-az
@@ -182,6 +182,10 @@
    <subpackage name="migration">
      <allow pkg="org.apache.kafka.controller" />
    </subpackage>
    <subpackage name="storage">
      <allow pkg="org.apache.kafka.common.internals" />
      <allow pkg="org.apache.kafka.snapshot" />
    </subpackage>
    <subpackage name="util">
      <allow class="org.apache.kafka.common.compress.Compression" exact-match="true" />
    </subpackage>

@@ -80,6 +80,8 @@
  <allow pkg="org.apache.kafka.raft" />

  <subpackage name="server">
    <allow pkg="org.apache.kafka.server" />
    <allow pkg="org.apache.kafka.image" />
    <subpackage name="metrics">
      <allow class="org.apache.kafka.server.authorizer.AuthorizableRequestContext" />
      <allow pkg="org.apache.kafka.server.telemetry" />

@@ -83,6 +83,11 @@
    <allow pkg="org.apache.kafka.coordinator.transaction"/>
  </subpackage>

  <subpackage name="storage.log">
    <allow pkg="org.apache.kafka.server" />
    <allow pkg="com.yammer.metrics" />
  </subpackage>

  <!-- START OF TIERED STORAGE INTEGRATION TEST IMPORT DEPENDENCIES -->
  <subpackage name="tiered.storage">
    <allow pkg="scala" />

@@ -49,6 +49,7 @@

  <subpackage name="common">
    <allow class="org.apache.kafka.clients.consumer.ConsumerRecord" exact-match="true" />
    <allow class="org.apache.kafka.clients.NodeApiVersions" exact-match="true" />
    <allow class="org.apache.kafka.common.message.ApiMessageType" exact-match="true" />
    <disallow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.common" exact-match="true" />

@@ -76,7 +77,10 @@
    <allow pkg="net.jpountz.xxhash" />
    <allow pkg="org.xerial.snappy" />
    <allow pkg="org.apache.kafka.common.compress" />
    <allow class="org.apache.kafka.common.record.CompressionType" exact-match="true" />
    <allow class="org.apache.kafka.common.record.CompressionType" />
    <allow class="org.apache.kafka.common.record.CompressionType.GZIP" />
    <allow class="org.apache.kafka.common.record.CompressionType.LZ4" />
    <allow class="org.apache.kafka.common.record.CompressionType.ZSTD" />
    <allow class="org.apache.kafka.common.record.RecordBatch" exact-match="true" />
  </subpackage>

@@ -150,6 +154,7 @@
  </subpackage>

  <subpackage name="record">
    <allow class="org.apache.kafka.common.config.ConfigDef.Range.between" exact-match="true" />
    <allow pkg="org.apache.kafka.common.compress" />
    <allow pkg="org.apache.kafka.common.header" />
    <allow pkg="org.apache.kafka.common.record" />
@@ -278,12 +283,16 @@

  <subpackage name="tools">
    <allow pkg="org.apache.kafka.common"/>
    <allow pkg="org.apache.kafka.metadata.properties" />
    <allow pkg="org.apache.kafka.network" />
    <allow pkg="org.apache.kafka.server.util" />
    <allow pkg="kafka.admin" />
    <allow pkg="kafka.server" />
    <allow pkg="org.apache.kafka.storage.internals" />
    <allow pkg="org.apache.kafka.server.config" />
    <allow pkg="org.apache.kafka.server.common" />
    <allow pkg="org.apache.kafka.server.log.remote.metadata.storage" />
    <allow pkg="org.apache.kafka.server.log.remote.storage" />
    <allow pkg="org.apache.kafka.clients" />
    <allow pkg="org.apache.kafka.clients.admin" />
    <allow pkg="org.apache.kafka.clients.producer" />
@@ -301,6 +310,7 @@
    <allow pkg="kafka.utils" />
    <allow pkg="scala.collection" />
    <allow pkg="org.apache.kafka.coordinator.transaction" />
    <allow pkg="org.apache.kafka.coordinator.group" />

    <subpackage name="consumer">
      <allow pkg="org.apache.kafka.tools"/>

@@ -39,7 +39,7 @@
  <suppress checks="(NPathComplexity|ClassFanOutComplexity|CyclomaticComplexity|ClassDataAbstractionCoupling|FinalLocalVariable|LocalVariableName|MemberName|ParameterName|MethodLength|JavaNCSS|AvoidStarImport)"
            files="core[\\/]src[\\/](generated|generated-test)[\\/].+.java$"/>
  <suppress checks="NPathComplexity" files="(ClusterTestExtensions|KafkaApisBuilder|SharePartition).java"/>
  <suppress checks="NPathComplexity|ClassFanOutComplexity|ClassDataAbstractionCoupling" files="(RemoteLogManager|RemoteLogManagerTest).java"/>
  <suppress checks="NPathComplexity|ClassFanOutComplexity|ClassDataAbstractionCoupling|JavaNCSS" files="(RemoteLogManager|RemoteLogManagerTest).java"/>
  <suppress checks="MethodLength" files="RemoteLogManager.java"/>
  <suppress checks="ClassFanOutComplexity" files="RemoteLogManagerTest.java"/>
  <suppress checks="MethodLength"
@@ -190,11 +190,11 @@

  <!-- Raft -->
  <suppress checks="NPathComplexity"
            files="RecordsIterator.java"/>
            files="(DynamicVoter|RecordsIterator).java"/>

  <!-- Streams -->
  <suppress checks="ClassFanOutComplexity"
            files="(KafkaStreams|KStreamImpl|KTableImpl|InternalTopologyBuilder|StreamsPartitionAssignor|StreamThread|IQv2StoreIntegrationTest|KStreamImplTest|RocksDBStore).java"/>
            files="(KafkaStreams|KStreamImpl|KTableImpl|InternalTopologyBuilder|StreamsPartitionAssignor|StreamThread|IQv2StoreIntegrationTest|KStreamImplTest|RocksDBStore|StreamTask).java"/>

  <suppress checks="MethodLength"
            files="KTableImpl.java"/>
@@ -326,7 +326,7 @@
  <suppress checks="(ParameterNumber|ClassDataAbstractionCoupling)"
            files="(QuorumController).java"/>
  <suppress checks="(CyclomaticComplexity|NPathComplexity)"
            files="(PartitionRegistration|PartitionChangeBuilder).java"/>
            files="(PartitionRegistration|PartitionChangeBuilder|ScramParser).java"/>
  <suppress checks="CyclomaticComplexity"
            files="(ClientQuotasImage|KafkaEventQueue|MetadataDelta|QuorumController|ReplicationControlManager|KRaftMigrationDriver|ClusterControlManager|MetaPropertiesEnsemble).java"/>
  <suppress checks="NPathComplexity"
@@ -372,11 +372,12 @@
  <suppress checks="CyclomaticComplexity"
            files="(S3StreamsMetadataImage|S3StreamMetricsManager|BlockCache|StreamReader|S3MetricsExporter|PrometheusUtils).java"/>
  <suppress checks="NPathComplexity"
            files="(StreamControlManager|S3StreamsMetadataImage|CompactionManagerTest|S3StreamMetricsManager|CompactionManager|BlockCache|DefaultS3BlockCache|StreamReader|S3Utils|AnomalyDetector|Recreate|ForceClose|QuorumController).java"/>
            files="(StreamControlManager|S3StreamsMetadataImage|CompactionManagerTest|S3StreamMetricsManager|CompactionManager|BlockCache|DefaultS3BlockCache|StreamReader|S3Utils|AnomalyDetector|Recreate|ForceClose|QuorumController|AbstractObjectStorage).java"/>
  <suppress checks="MethodLength"
            files="(S3StreamMetricsManager|BlockWALServiceTest).java"/>
  <suppress id="dontUseSystemExit"
            files="(BenchTool|S3Utils|AutoMQCLI).java"/>
  <suppress checks="ClassDataAbstractionCoupling" files="(StreamControlManagerTest|ControllerStreamManager).java"/>
  <suppress files="core[\/]src[\/]test[\/]java[\/]kafka[\/]automq[\/]table[\/]process[\/]proto[\/].*\.java$" checks=".*"/>

</suppressions>
@@ -18,9 +18,21 @@ package org.apache.kafka.clients.admin;

import org.apache.kafka.common.annotation.InterfaceStability;

import java.util.Optional;

/**
 * Options for {@link Admin#addRaftVoter}.
 */
@InterfaceStability.Stable
public class AddRaftVoterOptions extends AbstractOptions<AddRaftVoterOptions> {
    private Optional<String> clusterId = Optional.empty();

    public AddRaftVoterOptions setClusterId(Optional<String> clusterId) {
        this.clusterId = clusterId;
        return this;
    }

    public Optional<String> clusterId() {
        return clusterId;
    }
}
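For reference, a minimal usage sketch of the new option. This assumes the Kafka 3.9-style `Admin#addRaftVoter(int, Uuid, Set, AddRaftVoterOptions)` signature; the bootstrap address, voter id, directory id, and endpoint below are illustrative, not part of this change:

```java
import org.apache.kafka.clients.admin.AddRaftVoterOptions;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.RaftVoterEndpoint;
import org.apache.kafka.common.Uuid;

import java.util.Optional;
import java.util.Properties;
import java.util.Set;

public class AddVoterSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // illustrative address
        try (Admin admin = Admin.create(props)) {
            // Pin the expected cluster id so the request fails fast if it
            // reaches the wrong cluster; an empty Optional skips the check.
            AddRaftVoterOptions options = new AddRaftVoterOptions()
                .setClusterId(Optional.of("AAAAAAAAAAAAAAAAAAAAAA")); // illustrative id
            admin.addRaftVoter(
                3001,                              // illustrative voter id
                Uuid.randomUuid(),                 // voter directory id
                Set.of(new RaftVoterEndpoint("CONTROLLER", "controller-3", 9093)),
                options).all().get();
        }
    }
}
```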
@@ -1729,6 +1729,16 @@ public interface Admin extends AutoCloseable {
     * @return {@link GetNodesResult}
     */
    GetNodesResult getNodes(Collection<Integer> nodeIdList, GetNodesOptions options);

    /**
     * Update a consumer group.
     *
     * @param groupId   group id
     * @param groupSpec {@link UpdateGroupSpec}
     * @param options   {@link UpdateGroupOptions}
     * @return {@link UpdateGroupResult}
     */
    UpdateGroupResult updateGroup(String groupId, UpdateGroupSpec groupSpec, UpdateGroupOptions options);
    // AutoMQ inject end

    /**
@@ -314,5 +314,11 @@ public class ForwardingAdmin implements Admin {
    public GetNodesResult getNodes(Collection<Integer> nodeIdList, GetNodesOptions options) {
        return delegate.getNodes(nodeIdList, options);
    }

    @Override
    public UpdateGroupResult updateGroup(String groupId, UpdateGroupSpec groupSpec, UpdateGroupOptions options) {
        return delegate.updateGroup(groupId, groupSpec, options);
    }

    // AutoMQ inject end
}
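Because `ForwardingAdmin` only delegates, downstream code can intercept the new AutoMQ call by overriding it. A minimal sketch; the subclass and the logging it does are illustrative, not part of this change:

```java
import java.util.Map;

import org.apache.kafka.clients.admin.ForwardingAdmin;
import org.apache.kafka.clients.admin.UpdateGroupOptions;
import org.apache.kafka.clients.admin.UpdateGroupResult;
import org.apache.kafka.clients.admin.UpdateGroupSpec;

// Hypothetical subclass that audits group updates before delegating.
public class AuditingAdmin extends ForwardingAdmin {
    public AuditingAdmin(Map<String, Object> configs) {
        super(configs);
    }

    @Override
    public UpdateGroupResult updateGroup(String groupId, UpdateGroupSpec groupSpec, UpdateGroupOptions options) {
        System.out.printf("updateGroup(%s, %s)%n", groupId, groupSpec);
        return super.updateGroup(groupId, groupSpec, options);
    }
}
```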
@@ -1,12 +1,20 @@
/*
 * Copyright 2024, AutoMQ HK Limited.
 * Copyright 2025, AutoMQ HK Limited.
 *
 * The use of this file is governed by the Business Source License,
 * as detailed in the file "/LICENSE.S3Stream" included in this repository.
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * As of the Change Date specified in that file, in accordance with
 * the Business Source License, use of this software will be governed
 * by the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.clients.admin;
@@ -1,12 +1,20 @@
/*
 * Copyright 2024, AutoMQ HK Limited.
 * Copyright 2025, AutoMQ HK Limited.
 *
 * The use of this file is governed by the Business Source License,
 * as detailed in the file "/LICENSE.S3Stream" included in this repository.
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * As of the Change Date specified in that file, in accordance with
 * the Business Source License, use of this software will be governed
 * by the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.clients.admin;
@@ -56,6 +56,7 @@ import org.apache.kafka.clients.admin.internals.ListConsumerGroupOffsetsHandler;
import org.apache.kafka.clients.admin.internals.ListOffsetsHandler;
import org.apache.kafka.clients.admin.internals.ListTransactionsHandler;
import org.apache.kafka.clients.admin.internals.RemoveMembersFromConsumerGroupHandler;
import org.apache.kafka.clients.admin.internals.UpdateGroupHandler;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.internals.ConsumerProtocol;
import org.apache.kafka.common.Cluster;

@@ -241,6 +242,7 @@ import org.apache.kafka.common.requests.ListPartitionReassignmentsResponse;
import org.apache.kafka.common.requests.MetadataRequest;
import org.apache.kafka.common.requests.MetadataResponse;
import org.apache.kafka.common.requests.RemoveRaftVoterRequest;
import org.apache.kafka.common.requests.RemoveRaftVoterResponse;
import org.apache.kafka.common.requests.RenewDelegationTokenRequest;
import org.apache.kafka.common.requests.RenewDelegationTokenResponse;
import org.apache.kafka.common.requests.UnregisterBrokerRequest;
@@ -1202,16 +1204,27 @@ public class KafkaAdminClient extends AdminClient {
        long pollTimeout = Long.MAX_VALUE;
        log.trace("Trying to choose nodes for {} at {}", pendingCalls, now);

        Iterator<Call> pendingIter = pendingCalls.iterator();
        while (pendingIter.hasNext()) {
            Call call = pendingIter.next();
        List<Call> toRemove = new ArrayList<>();
        // Using pendingCalls.size() to get the list size before the for-loop to avoid infinite loop.
        // If call.fail keeps adding the call to pendingCalls,
        // the loop like for (int i = 0; i < pendingCalls.size(); i++) can't stop.
        int pendingSize = pendingCalls.size();
        // pendingCalls could be modified in this loop,
        // hence using for-loop instead of iterator to avoid ConcurrentModificationException.
        for (int i = 0; i < pendingSize; i++) {
            Call call = pendingCalls.get(i);
            // If the call is being retried, await the proper backoff before finding the node
            if (now < call.nextAllowedTryMs) {
                pollTimeout = Math.min(pollTimeout, call.nextAllowedTryMs - now);
            } else if (maybeDrainPendingCall(call, now)) {
                pendingIter.remove();
                toRemove.add(call);
            }
        }

        // Use remove instead of removeAll to avoid delete all matched elements
        for (Call call : toRemove) {
            pendingCalls.remove(call);
        }
        return pollTimeout;
    }
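The same snapshot-then-remove pattern in isolation, as a runnable sketch; the list contents and the drain condition are illustrative stand-ins:

```java
import java.util.ArrayList;
import java.util.List;

public class DrainSketch {
    public static void main(String[] args) {
        List<String> pendingCalls = new ArrayList<>(List.of("a", "b", "c"));
        List<String> toRemove = new ArrayList<>();
        // Snapshot the size up front: even if handling a call appends new
        // entries to pendingCalls, this loop still terminates, and indexed
        // access avoids ConcurrentModificationException.
        int pendingSize = pendingCalls.size();
        for (int i = 0; i < pendingSize; i++) {
            String call = pendingCalls.get(i);
            if (!call.equals("b")) { // stand-in for maybeDrainPendingCall
                toRemove.add(call);
            }
        }
        // remove() deletes one element per drained call; removeAll() would
        // drop every equal element at once.
        for (String call : toRemove) {
            pendingCalls.remove(call);
        }
        System.out.println(pendingCalls); // prints [b]
    }
}
```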
@@ -4701,6 +4714,8 @@ public class KafkaAdminClient extends AdminClient {
                    setPort(endpoint.port())));
            return new AddRaftVoterRequest.Builder(
                new AddRaftVoterRequestData().
                    setClusterId(options.clusterId().orElse(null)).
                    setTimeoutMs(timeoutMs).
                    setVoterId(voterId).
                    setVoterDirectoryId(voterDirectoryId).
                    setListeners(listeners));
@@ -4745,13 +4760,14 @@ public class KafkaAdminClient extends AdminClient {
        RemoveRaftVoterRequest.Builder createRequest(int timeoutMs) {
            return new RemoveRaftVoterRequest.Builder(
                new RemoveRaftVoterRequestData().
                    setClusterId(options.clusterId().orElse(null)).
                    setVoterId(voterId).
                    setVoterDirectoryId(voterDirectoryId));
        }

        @Override
        void handleResponse(AbstractResponse response) {
            AddRaftVoterResponse addResponse = (AddRaftVoterResponse) response;
            RemoveRaftVoterResponse addResponse = (RemoveRaftVoterResponse) response;
            if (addResponse.data().errorCode() != Errors.NONE.code()) {
                ApiError error = new ApiError(
                    addResponse.data().errorCode(),
@@ -4857,6 +4873,14 @@ public class KafkaAdminClient extends AdminClient {
        return new GetNodesResult(future);
    }

    @Override
    public UpdateGroupResult updateGroup(String groupId, UpdateGroupSpec groupSpec, UpdateGroupOptions options) {
        SimpleAdminApiFuture<CoordinatorKey, Void> future = UpdateGroupHandler.newFuture(groupId);
        UpdateGroupHandler handler = new UpdateGroupHandler(groupId, groupSpec, logContext);
        invokeDriver(handler, future, options.timeoutMs);
        return new UpdateGroupResult(future.get(CoordinatorKey.byGroupId(groupId)));
    }

    private <K, V> void invokeDriver(
        AdminApiHandler<K, V> handler,
        AdminApiFuture<K, V> future,
@@ -4931,6 +4955,10 @@ public class KafkaAdminClient extends AdminClient {
            return ListOffsetsRequest.EARLIEST_TIMESTAMP;
        } else if (offsetSpec instanceof OffsetSpec.MaxTimestampSpec) {
            return ListOffsetsRequest.MAX_TIMESTAMP;
        } else if (offsetSpec instanceof OffsetSpec.EarliestLocalSpec) {
            return ListOffsetsRequest.EARLIEST_LOCAL_TIMESTAMP;
        } else if (offsetSpec instanceof OffsetSpec.LatestTieredSpec) {
            return ListOffsetsRequest.LATEST_TIERED_TIMESTAMP;
        }
        return ListOffsetsRequest.LATEST_TIMESTAMP;
    }

@@ -1,12 +1,20 @@
/*
 * Copyright 2024, AutoMQ HK Limited.
 * Copyright 2025, AutoMQ HK Limited.
 *
 * The use of this file is governed by the Business Source License,
 * as detailed in the file "/LICENSE.S3Stream" included in this repository.
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * As of the Change Date specified in that file, in accordance with
 * the Business Source License, use of this software will be governed
 * by the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.clients.admin;
@@ -26,6 +26,8 @@ public class OffsetSpec {
    public static class EarliestSpec extends OffsetSpec { }
    public static class LatestSpec extends OffsetSpec { }
    public static class MaxTimestampSpec extends OffsetSpec { }
    public static class EarliestLocalSpec extends OffsetSpec { }
    public static class LatestTieredSpec extends OffsetSpec { }
    public static class TimestampSpec extends OffsetSpec {
        private final long timestamp;

@@ -70,4 +72,23 @@ public class OffsetSpec {
        return new MaxTimestampSpec();
    }

    /**
     * Used to retrieve the local log start offset.
     * The local log start offset is the offset of a log above which reads
     * are guaranteed to be served from the disk of the leader broker.
     * <br/>
     * Note: When tiered storage is not enabled, it behaves the same as retrieving the earliest offset.
     */
    public static OffsetSpec earliestLocal() {
        return new EarliestLocalSpec();
    }

    /**
     * Used to retrieve the highest offset of data stored in remote storage.
     * <br/>
     * Note: When tiered storage is not enabled, an unknown offset is returned.
     */
    public static OffsetSpec latestTiered() {
        return new LatestTieredSpec();
    }
}
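A short usage sketch of the two new specs via `Admin#listOffsets`; the topic, partition, and bootstrap address are illustrative:

```java
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.common.TopicPartition;

public class TieredOffsetsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // illustrative address
        TopicPartition tp = new TopicPartition("orders", 0);
        try (Admin admin = Admin.create(props)) {
            // Earliest offset still served from the leader's local disk.
            ListOffsetsResultInfo local = admin.listOffsets(
                Map.of(tp, OffsetSpec.earliestLocal())).partitionResult(tp).get();
            // Highest offset already copied to remote (tiered) storage.
            ListOffsetsResultInfo tiered = admin.listOffsets(
                Map.of(tp, OffsetSpec.latestTiered())).partitionResult(tp).get();
            System.out.println(local.offset() + " / " + tiered.offset());
        }
    }
}
```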
@@ -91,10 +91,8 @@ public class RaftVoterEndpoint {

    @Override
    public String toString() {
        return "RaftVoterEndpoint" +
            "(name=" + name +
            ", host=" + host +
            ", port=" + port +
            ")";
        // enclose IPv6 hosts in square brackets for readability
        String hostString = host.contains(":") ? "[" + host + "]" : host;
        return name + "://" + hostString + ":" + port;
    }
}
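The effect of the new format in a tiny sketch, assuming the public `RaftVoterEndpoint(String, String, int)` constructor; the endpoint values are illustrative:

```java
import org.apache.kafka.clients.admin.RaftVoterEndpoint;

public class EndpointToStringSketch {
    public static void main(String[] args) {
        // Hostnames and IPv4 addresses print unchanged.
        System.out.println(new RaftVoterEndpoint("CONTROLLER", "controller-1", 9093));
        // -> CONTROLLER://controller-1:9093

        // IPv6 hosts contain ':', so they are wrapped in square brackets.
        System.out.println(new RaftVoterEndpoint("CONTROLLER", "::1", 9093));
        // -> CONTROLLER://[::1]:9093
    }
}
```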
@@ -18,9 +18,21 @@ package org.apache.kafka.clients.admin;

import org.apache.kafka.common.annotation.InterfaceStability;

import java.util.Optional;

/**
 * Options for {@link Admin#removeRaftVoter}.
 */
@InterfaceStability.Stable
public class RemoveRaftVoterOptions extends AbstractOptions<RemoveRaftVoterOptions> {
    private Optional<String> clusterId = Optional.empty();

    public RemoveRaftVoterOptions setClusterId(Optional<String> clusterId) {
        this.clusterId = clusterId;
        return this;
    }

    public Optional<String> clusterId() {
        return clusterId;
    }
}

@@ -0,0 +1,23 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.clients.admin;

public class UpdateGroupOptions extends AbstractOptions<UpdateGroupOptions> {
}
@@ -0,0 +1,37 @@
/*
 * Copyright 2025, AutoMQ HK Limited.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kafka.clients.admin;

import org.apache.kafka.common.KafkaFuture;

public class UpdateGroupResult extends AbstractOptions<UpdateGroupResult> {
    private final KafkaFuture<Void> future;

    UpdateGroupResult(final KafkaFuture<Void> future) {
        this.future = future;
    }

    /**
     * Return a future which succeeds if the group update succeeds.
     */
    public KafkaFuture<Void> all() {
        return future;
    }
}
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Copyright 2025, AutoMQ HK Limited.
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.kafka.clients.admin;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class UpdateGroupSpec {
|
||||
private String linkId;
|
||||
private boolean promoted;
|
||||
|
||||
public UpdateGroupSpec linkId(String linkId) {
|
||||
this.linkId = linkId;
|
||||
return this;
|
||||
}
|
||||
|
||||
public UpdateGroupSpec promoted(boolean promoted) {
|
||||
this.promoted = promoted;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String linkId() {
|
||||
return linkId;
|
||||
}
|
||||
|
||||
public boolean promoted() {
|
||||
return promoted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
UpdateGroupSpec spec = (UpdateGroupSpec) o;
|
||||
return promoted == spec.promoted && Objects.equals(linkId, spec.linkId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(linkId, promoted);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "UpdateGroupsSpec{" +
|
||||
"linkId='" + linkId + '\'' +
|
||||
", promoted=" + promoted +
|
||||
'}';
|
||||
}
|
||||
}
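A usage sketch tying the spec, options, and result together, using the `Admin#updateGroup` method added in this branch; the group id, link id, and bootstrap address are illustrative:

```java
import java.util.Properties;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.UpdateGroupOptions;
import org.apache.kafka.clients.admin.UpdateGroupSpec;

public class UpdateGroupSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // illustrative address
        try (Admin admin = Admin.create(props)) {
            // Promote the consumer group attached to the given link.
            UpdateGroupSpec spec = new UpdateGroupSpec()
                .linkId("link-1")
                .promoted(true);
            admin.updateGroup("my-group", spec, new UpdateGroupOptions())
                .all()
                .get();
        }
    }
}
```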
Some files were not shown because too many files have changed in this diff.