mirror of
https://github.com/didi/KnowStreaming.git
synced 2025-12-25 04:32:12 +08:00
Compare commits
1371 Commits
ve_kafka_g
...
feature/su
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c1e858e998 | ||
|
|
232f06e5c2 | ||
|
|
fcf0a08e0a | ||
|
|
68839a6725 | ||
|
|
e2692a6fc4 | ||
|
|
c18eeb6d55 | ||
|
|
6853862753 | ||
|
|
610af4a9e8 | ||
|
|
ac4ea13be9 | ||
|
|
b6ea4aec19 | ||
|
|
8346453aa3 | ||
|
|
a9eb4ae30e | ||
|
|
cceff91f81 | ||
|
|
009ffeb099 | ||
|
|
e8e05812d0 | ||
|
|
58a421c4b9 | ||
|
|
af916d5a71 | ||
|
|
8b30f78744 | ||
|
|
592dee884a | ||
|
|
715744ca15 | ||
|
|
8a95401364 | ||
|
|
e80f8086d4 | ||
|
|
af82c2e615 | ||
|
|
1369e7b9eb | ||
|
|
ab6afe6dbc | ||
|
|
e24a582067 | ||
|
|
65f8beef32 | ||
|
|
38366809f1 | ||
|
|
530219a317 | ||
|
|
c07e544c50 | ||
|
|
c9308ee4f2 | ||
|
|
95158813b9 | ||
|
|
59e8a416b5 | ||
|
|
f6becbdf2c | ||
|
|
07bd00d60c | ||
|
|
1adfa639ac | ||
|
|
3f817991aa | ||
|
|
3b72f732be | ||
|
|
e2ad3afe3d | ||
|
|
ae04ffdd71 | ||
|
|
cf9d5b6832 | ||
|
|
9c418d3b38 | ||
|
|
128b180c83 | ||
|
|
b60941abc8 | ||
|
|
1a42472fd8 | ||
|
|
18e00f043e | ||
|
|
6385889902 | ||
|
|
ea0c744677 | ||
|
|
d1417bef8c | ||
|
|
a7309612d5 | ||
|
|
6e56688a31 | ||
|
|
a6abfb3ea8 | ||
|
|
ca696dd6e1 | ||
|
|
db40a5cd0a | ||
|
|
55161e439a | ||
|
|
bdffc10ca6 | ||
|
|
b1892c21e2 | ||
|
|
90e5492060 | ||
|
|
42195c3180 | ||
|
|
94b1e508fd | ||
|
|
dd3dcd37e9 | ||
|
|
0a6e9b7633 | ||
|
|
470e471cad | ||
|
|
bd58b48bcb | ||
|
|
0cd071c5c6 | ||
|
|
abaadfb9a8 | ||
|
|
49e7fea6d3 | ||
|
|
d68a19679e | ||
|
|
75be94fbea | ||
|
|
c11aa4fd17 | ||
|
|
cb96fef1a5 | ||
|
|
e98cfbcf91 | ||
|
|
0140b2e898 | ||
|
|
b3b7ab9f6b | ||
|
|
b34edb9b64 | ||
|
|
c2bc0f788d | ||
|
|
3f518c9e63 | ||
|
|
7f7801a5f7 | ||
|
|
e1e02f7c2a | ||
|
|
c497e4cb2d | ||
|
|
e34e3f3e3d | ||
|
|
b3fd494398 | ||
|
|
ffc115cb76 | ||
|
|
7bfe787e39 | ||
|
|
2256e8bbdb | ||
|
|
e975932d41 | ||
|
|
db044caf8b | ||
|
|
82fbea4e5f | ||
|
|
6aaa4b34b8 | ||
|
|
3cb1f03668 | ||
|
|
e61c446410 | ||
|
|
9d0345c9cd | ||
|
|
62f870a342 | ||
|
|
13641c00ba | ||
|
|
769c2c0fbc | ||
|
|
c71865f623 | ||
|
|
258385dc9a | ||
|
|
65238231f0 | ||
|
|
cb22e02fbe | ||
|
|
aa0bec1206 | ||
|
|
793c780015 | ||
|
|
ec6f063450 | ||
|
|
f25c65b98b | ||
|
|
2d99aae779 | ||
|
|
a8847dc282 | ||
|
|
4852c01c88 | ||
|
|
3d6f405b69 | ||
|
|
18e3fbf41d | ||
|
|
ae8cc3092b | ||
|
|
5c26e8947b | ||
|
|
fbe6945d3b | ||
|
|
7dc8f2dc48 | ||
|
|
91c60ce72c | ||
|
|
687eea80c8 | ||
|
|
9bfe3fd1db | ||
|
|
03f81bc6de | ||
|
|
eed9571ffa | ||
|
|
e4651ef749 | ||
|
|
f715cf7a8d | ||
|
|
fad9ddb9a1 | ||
|
|
b6e4f50849 | ||
|
|
5c6911e398 | ||
|
|
a0371ab88b | ||
|
|
fa2abadc25 | ||
|
|
f03460f3cd | ||
|
|
b5683b73c2 | ||
|
|
c062586c7e | ||
|
|
98a5c7b776 | ||
|
|
e204023b1f | ||
|
|
4c5ffccc45 | ||
|
|
fbcf58e19c | ||
|
|
e5c6d00438 | ||
|
|
ab6a4d7099 | ||
|
|
78b2b8a45e | ||
|
|
add2af4f3f | ||
|
|
235c0ed30e | ||
|
|
5bd93aa478 | ||
|
|
f95be2c1b3 | ||
|
|
5110b30f62 | ||
|
|
861faa5df5 | ||
|
|
efdf624c67 | ||
|
|
caccf9cef5 | ||
|
|
6ba3dceb84 | ||
|
|
9b7c41e804 | ||
|
|
346aee8fe7 | ||
|
|
353d781bca | ||
|
|
3ce4bf231a | ||
|
|
d046cb8bf4 | ||
|
|
da95c63503 | ||
|
|
915e48de22 | ||
|
|
256f770971 | ||
|
|
16e251cbe8 | ||
|
|
67743b859a | ||
|
|
c275b42632 | ||
|
|
a02760417b | ||
|
|
0e50bfc5d4 | ||
|
|
eab988e18f | ||
|
|
dd6004b9d4 | ||
|
|
ac7c32acd5 | ||
|
|
f4a219ceef | ||
|
|
a8b56fb613 | ||
|
|
2925a20e8e | ||
|
|
6b3eb05735 | ||
|
|
17e0c39f83 | ||
|
|
4994639111 | ||
|
|
c187b5246f | ||
|
|
6ed6d5ec8a | ||
|
|
0735b332a8 | ||
|
|
344cec19fe | ||
|
|
6ef365e201 | ||
|
|
edfa6a9f71 | ||
|
|
860d0b92e2 | ||
|
|
5bceed7105 | ||
|
|
44a2fe0398 | ||
|
|
218459ad1b | ||
|
|
7db757bc12 | ||
|
|
896a943587 | ||
|
|
cd2c388e68 | ||
|
|
4543a339b7 | ||
|
|
1c4fbef9f2 | ||
|
|
b2f0f69365 | ||
|
|
c4fb18a73c | ||
|
|
5cad7b4106 | ||
|
|
f3c4133cd2 | ||
|
|
d9c59cb3d3 | ||
|
|
7a0db7161b | ||
|
|
6aefc16fa0 | ||
|
|
186dcd07e0 | ||
|
|
e8652d5db5 | ||
|
|
fb5964af84 | ||
|
|
249fe7c700 | ||
|
|
cc2a590b33 | ||
|
|
5b3f3e5575 | ||
|
|
36cf285397 | ||
|
|
4386563c2c | ||
|
|
0123ce4a5a | ||
|
|
c3d47d3093 | ||
|
|
9735c4f885 | ||
|
|
3a3141a361 | ||
|
|
ac30436324 | ||
|
|
7176e418f5 | ||
|
|
ca794f507e | ||
|
|
0f8be4fadc | ||
|
|
7066246e8f | ||
|
|
7d1bb48b59 | ||
|
|
dd0d519677 | ||
|
|
4293d05fca | ||
|
|
2c82baf9fc | ||
|
|
921161d6d0 | ||
|
|
e632c6c13f | ||
|
|
5833a8644c | ||
|
|
fab41e892f | ||
|
|
7a52cf67b0 | ||
|
|
175b8d643a | ||
|
|
6241eb052a | ||
|
|
c2fd0a8410 | ||
|
|
5127b600ec | ||
|
|
feb03aede6 | ||
|
|
47b6c5d86a | ||
|
|
c4a81613f4 | ||
|
|
daeb5c4cec | ||
|
|
38def45ad6 | ||
|
|
4b29a2fdfd | ||
|
|
a165ecaeef | ||
|
|
6637ba4ccc | ||
|
|
2f807eec2b | ||
|
|
636c2c6a83 | ||
|
|
898a55c703 | ||
|
|
8ffe7e7101 | ||
|
|
7661826ea5 | ||
|
|
e456be91ef | ||
|
|
da0a97cabf | ||
|
|
c1031a492a | ||
|
|
3c8aaf528c | ||
|
|
70ff20a2b0 | ||
|
|
6918f4babe | ||
|
|
805a704d34 | ||
|
|
c69c289bc4 | ||
|
|
dd5869e246 | ||
|
|
b51ffb81a3 | ||
|
|
ed0efd6bd2 | ||
|
|
39d2fe6195 | ||
|
|
7471d05c20 | ||
|
|
3492688733 | ||
|
|
a603783615 | ||
|
|
5c9096d564 | ||
|
|
c27786a257 | ||
|
|
81910d1958 | ||
|
|
55d5fc4bde | ||
|
|
f30586b150 | ||
|
|
37037c19f0 | ||
|
|
1a5e2c7309 | ||
|
|
941dd4fd65 | ||
|
|
5f6df3681c | ||
|
|
7d045dbf05 | ||
|
|
4ff4accdc3 | ||
|
|
bbe967c4a8 | ||
|
|
b101cec6fa | ||
|
|
e98ec562a2 | ||
|
|
0e71ecc587 | ||
|
|
0f11a65df8 | ||
|
|
da00c8c877 | ||
|
|
8b177877bb | ||
|
|
ea199dca8d | ||
|
|
88b5833f77 | ||
|
|
127b5be651 | ||
|
|
80f001cdd5 | ||
|
|
30d297cae1 | ||
|
|
a96853db90 | ||
|
|
c1502152c0 | ||
|
|
afda292796 | ||
|
|
163cab78ae | ||
|
|
8f4ff36c09 | ||
|
|
47b6b3577a | ||
|
|
f3eca3b214 | ||
|
|
62f7d3f72f | ||
|
|
26e60d8a64 | ||
|
|
df655a250c | ||
|
|
811fc9b400 | ||
|
|
83df02783c | ||
|
|
6a5efce874 | ||
|
|
fa0ae5e474 | ||
|
|
cafd665a2d | ||
|
|
e8f77a456b | ||
|
|
4510c62ebd | ||
|
|
79864955e1 | ||
|
|
ff26a8d46c | ||
|
|
cc226d552e | ||
|
|
962f89475b | ||
|
|
ec204a1605 | ||
|
|
58d7623938 | ||
|
|
8f4ecfcdc0 | ||
|
|
ef719cedbc | ||
|
|
b7856c892b | ||
|
|
7435a78883 | ||
|
|
f49206b316 | ||
|
|
7d500a0721 | ||
|
|
98a519f20b | ||
|
|
39b655bb43 | ||
|
|
78d56a49fe | ||
|
|
d2e9d1fa01 | ||
|
|
41ff914dc3 | ||
|
|
3ba447fac2 | ||
|
|
e9cc380a2e | ||
|
|
017cac9bbe | ||
|
|
9ad72694af | ||
|
|
e8f9821870 | ||
|
|
bb167b9f8d | ||
|
|
28fbb5e130 | ||
|
|
16101e81e8 | ||
|
|
aced504d2a | ||
|
|
abb064d9d1 | ||
|
|
dc1899a1cd | ||
|
|
442f34278c | ||
|
|
a6dcbcd35b | ||
|
|
2b600e96eb | ||
|
|
177bb80f31 | ||
|
|
63fbe728c4 | ||
|
|
b33020840b | ||
|
|
c5caf7c0d6 | ||
|
|
0f0473db4c | ||
|
|
beadde3e06 | ||
|
|
a423a20480 | ||
|
|
79f0a23813 | ||
|
|
780fdea2cc | ||
|
|
1c0fda1adf | ||
|
|
9cf13e9b30 | ||
|
|
87cd058fd8 | ||
|
|
81b1ec48c2 | ||
|
|
66dd82f4fd | ||
|
|
ce35b23911 | ||
|
|
e79342acf5 | ||
|
|
3fc9f39d24 | ||
|
|
0221fb3a4a | ||
|
|
f009f8b7ba | ||
|
|
b76959431a | ||
|
|
975370b593 | ||
|
|
7275030971 | ||
|
|
99b0be5a95 | ||
|
|
edd3f95fc4 | ||
|
|
479f983b09 | ||
|
|
7650332252 | ||
|
|
8f1a021851 | ||
|
|
ce4df4d5fd | ||
|
|
bd43ae1b5d | ||
|
|
8fa34116b9 | ||
|
|
7e92553017 | ||
|
|
b7e243a693 | ||
|
|
35d4888afb | ||
|
|
b3e8a4f0f6 | ||
|
|
321125caee | ||
|
|
e01427aa4f | ||
|
|
14652e7f7a | ||
|
|
7c05899dbd | ||
|
|
56726b703f | ||
|
|
6237b0182f | ||
|
|
be5b662f65 | ||
|
|
224698355c | ||
|
|
8f47138ecd | ||
|
|
d159746391 | ||
|
|
63df93ea5e | ||
|
|
38948c0daa | ||
|
|
6c610427b6 | ||
|
|
b4cc31c459 | ||
|
|
7d781712c9 | ||
|
|
dd61ce9b2a | ||
|
|
69a7212986 | ||
|
|
ff05a951fd | ||
|
|
89d5357b40 | ||
|
|
7ca3d65c42 | ||
|
|
7b5c2d800f | ||
|
|
f414b47a78 | ||
|
|
44f4e2f0f9 | ||
|
|
2361008bdf | ||
|
|
7377ef3ec5 | ||
|
|
a28d064b7a | ||
|
|
e2e57e8575 | ||
|
|
9d90bd2835 | ||
|
|
7445e68df4 | ||
|
|
ab42625ad2 | ||
|
|
18789a0a53 | ||
|
|
68a37bb56a | ||
|
|
3b33652c47 | ||
|
|
1e0c4c3904 | ||
|
|
04e223de16 | ||
|
|
c4a691aa8a | ||
|
|
ff9dde163a | ||
|
|
eb7efbd1a5 | ||
|
|
8c8c362c54 | ||
|
|
66e119ad5d | ||
|
|
6dedc04a05 | ||
|
|
0cf8bad0df | ||
|
|
95c9582d8b | ||
|
|
7815126ff5 | ||
|
|
a5fa9de54b | ||
|
|
95f1a2c630 | ||
|
|
1e256ae1fd | ||
|
|
9fc9c54fa1 | ||
|
|
1b362b1e02 | ||
|
|
04e3172cca | ||
|
|
1caab7f3f7 | ||
|
|
9d33c725ad | ||
|
|
6ed1d38106 | ||
|
|
0f07ddedaf | ||
|
|
289945b471 | ||
|
|
f331a6d144 | ||
|
|
0c8c12a651 | ||
|
|
028c3bb2fa | ||
|
|
d7a5a0d405 | ||
|
|
5ef5f6e531 | ||
|
|
1d205734b3 | ||
|
|
5edd43884f | ||
|
|
c1992373bc | ||
|
|
ed562f9c8a | ||
|
|
b4d44ef8c7 | ||
|
|
ad0c16a1b4 | ||
|
|
7eabe66853 | ||
|
|
3983d73695 | ||
|
|
161d4c4562 | ||
|
|
9a1e89564e | ||
|
|
0c18c5b4f6 | ||
|
|
3e12ba34f7 | ||
|
|
e71e29391b | ||
|
|
9b7b9a7af0 | ||
|
|
a23819c308 | ||
|
|
6cb1825d96 | ||
|
|
77b8c758dc | ||
|
|
e5a582cfad | ||
|
|
ec83db267e | ||
|
|
bfd026cae7 | ||
|
|
35f1dd8082 | ||
|
|
7ed0e7dd23 | ||
|
|
1a3cbf7a9d | ||
|
|
d9e4abc3de | ||
|
|
a4186085d3 | ||
|
|
26b1846bb4 | ||
|
|
1aa89527a6 | ||
|
|
eac76d7ad0 | ||
|
|
cea0cd56f6 | ||
|
|
c4b897f282 | ||
|
|
47389dbabb | ||
|
|
a2f8b1a851 | ||
|
|
feac0a058f | ||
|
|
27eeac9fd4 | ||
|
|
a14db4b194 | ||
|
|
54ee271a47 | ||
|
|
a3a9be4f7f | ||
|
|
d4f0a832f3 | ||
|
|
7dc533372c | ||
|
|
1737d87713 | ||
|
|
dbb98dea11 | ||
|
|
802b382b36 | ||
|
|
fc82999d45 | ||
|
|
08aa000c07 | ||
|
|
39015b5100 | ||
|
|
0d635ad419 | ||
|
|
9133205915 | ||
|
|
725ac10c3d | ||
|
|
2b76358c8f | ||
|
|
833c360698 | ||
|
|
7da1e67b01 | ||
|
|
7eb86a47dd | ||
|
|
d67e383c28 | ||
|
|
8749d3e1f5 | ||
|
|
30fba21c48 | ||
|
|
d83d35aee9 | ||
|
|
1d3caeea7d | ||
|
|
c8806dbb4d | ||
|
|
e5802c7f50 | ||
|
|
590f684d66 | ||
|
|
8e5a67f565 | ||
|
|
8d2fbce11e | ||
|
|
26916f6632 | ||
|
|
fbfa0d2d2a | ||
|
|
e626b99090 | ||
|
|
203859b71b | ||
|
|
9a25c22f3a | ||
|
|
0a03f41a7c | ||
|
|
56191939c8 | ||
|
|
beb754aaaa | ||
|
|
f234f740ca | ||
|
|
e14679694c | ||
|
|
e06712397e | ||
|
|
b6c6df7ffc | ||
|
|
375c6f56c9 | ||
|
|
0bf85c97b5 | ||
|
|
630e582321 | ||
|
|
a89fe23bdd | ||
|
|
a7a5fa9a31 | ||
|
|
c73a7eee2f | ||
|
|
121f8468d5 | ||
|
|
7b0b6936e0 | ||
|
|
597ea04a96 | ||
|
|
f7f90aeaaa | ||
|
|
227479f695 | ||
|
|
6477fb3fe0 | ||
|
|
4223f4f3c4 | ||
|
|
7288874d72 | ||
|
|
68f76f2daf | ||
|
|
fe6ddebc49 | ||
|
|
12b5acd073 | ||
|
|
a6f1fe07b3 | ||
|
|
85e3f2a946 | ||
|
|
d4f416de14 | ||
|
|
0d9a6702c1 | ||
|
|
d11285cdbf | ||
|
|
5f1f33d2b9 | ||
|
|
474daf752d | ||
|
|
27d1b92690 | ||
|
|
993afa4c19 | ||
|
|
028d891c32 | ||
|
|
0df55ec22d | ||
|
|
579f64774d | ||
|
|
792f8d939d | ||
|
|
e4fb02fcda | ||
|
|
0c14c641d0 | ||
|
|
dba671fd1e | ||
|
|
80d1693722 | ||
|
|
26014a11b2 | ||
|
|
848fddd55a | ||
|
|
97f5f05f1a | ||
|
|
25b82810f2 | ||
|
|
9b1e506fa7 | ||
|
|
7a42996e97 | ||
|
|
dbfcebcf67 | ||
|
|
37c3f69a28 | ||
|
|
5d412890b4 | ||
|
|
1e318a4c40 | ||
|
|
d4549176ec | ||
|
|
61efdf492f | ||
|
|
67ea4d44c8 | ||
|
|
fdae05a4aa | ||
|
|
5efb837ee8 | ||
|
|
584b626d93 | ||
|
|
de25a4ed8e | ||
|
|
2e852e5ca6 | ||
|
|
b11000715a | ||
|
|
b3f8b46f0f | ||
|
|
8d22a0664a | ||
|
|
20756a3453 | ||
|
|
c9b4d45a64 | ||
|
|
83f7f5468b | ||
|
|
59c042ad67 | ||
|
|
d550fc5068 | ||
|
|
6effba69a0 | ||
|
|
9b46956259 | ||
|
|
b5a4a732da | ||
|
|
487862367e | ||
|
|
5b63b9ce67 | ||
|
|
afbcd3e1df | ||
|
|
12b82c1395 | ||
|
|
863b765e0d | ||
|
|
731429c51c | ||
|
|
66f3bc61fe | ||
|
|
4efe35dd51 | ||
|
|
c92461ef93 | ||
|
|
405e6e0c1d | ||
|
|
0d227aef49 | ||
|
|
0e49002f42 | ||
|
|
2e016800e0 | ||
|
|
09f317b991 | ||
|
|
5a48cb1547 | ||
|
|
f632febf33 | ||
|
|
3c53467943 | ||
|
|
d358c0f4f7 | ||
|
|
de977a5b32 | ||
|
|
703d685d59 | ||
|
|
31a5f17408 | ||
|
|
c40ae3c455 | ||
|
|
b71a34279e | ||
|
|
8f8c0c4eda | ||
|
|
3a384f0e34 | ||
|
|
cf7bc11cbd | ||
|
|
be60ae8399 | ||
|
|
8e50d145d5 | ||
|
|
7a3d15525c | ||
|
|
64f32d8b24 | ||
|
|
949d6ba605 | ||
|
|
ceb8db09f4 | ||
|
|
ed05a0ebb8 | ||
|
|
a7cbb76655 | ||
|
|
93cbfa0b1f | ||
|
|
6120613a98 | ||
|
|
dbd00db159 | ||
|
|
befde952f5 | ||
|
|
1aa759e5be | ||
|
|
2de27719c1 | ||
|
|
21db57b537 | ||
|
|
dfe8d09477 | ||
|
|
90dfa22c64 | ||
|
|
0f35427645 | ||
|
|
7909f60ff8 | ||
|
|
9a1a8a4c30 | ||
|
|
fa7ad64140 | ||
|
|
8a0c23339d | ||
|
|
e7ab3aff16 | ||
|
|
d0948797b9 | ||
|
|
04a5e17451 | ||
|
|
47065c8042 | ||
|
|
488c778736 | ||
|
|
d10a7bcc75 | ||
|
|
afe44a2537 | ||
|
|
9eadafe850 | ||
|
|
dab3eefcc0 | ||
|
|
2b9a6b28d8 | ||
|
|
465f98ca2b | ||
|
|
a0312be4fd | ||
|
|
4a5161372b | ||
|
|
4c9921f752 | ||
|
|
6dd72d40ee | ||
|
|
db49c234bb | ||
|
|
4a9df0c4d9 | ||
|
|
461573c2ba | ||
|
|
291992753f | ||
|
|
fcefe7ac38 | ||
|
|
7da712fcff | ||
|
|
2fd8687624 | ||
|
|
639b1f8336 | ||
|
|
ab3b83e42a | ||
|
|
4818629c40 | ||
|
|
61784c860a | ||
|
|
d5667254f2 | ||
|
|
af2b93983f | ||
|
|
8281301cbd | ||
|
|
0043ab8371 | ||
|
|
500eaace82 | ||
|
|
28e8540c78 | ||
|
|
69adf682e2 | ||
|
|
69cd1ff6e1 | ||
|
|
415d67cc32 | ||
|
|
46a2fec79b | ||
|
|
560b322fca | ||
|
|
effe17ac85 | ||
|
|
7699acfc1b | ||
|
|
6e058240b3 | ||
|
|
f005c6bc44 | ||
|
|
7be462599f | ||
|
|
271ab432d9 | ||
|
|
4114777a4e | ||
|
|
9189a54442 | ||
|
|
b95ee762e3 | ||
|
|
9e3c4dc06b | ||
|
|
1891a3ac86 | ||
|
|
9ecdcac06d | ||
|
|
790cb6a2e1 | ||
|
|
4a98e5f025 | ||
|
|
507abc1d84 | ||
|
|
9b732fbbad | ||
|
|
220f1c6fc3 | ||
|
|
7a950c67b6 | ||
|
|
78f625dc8c | ||
|
|
211d26a3ed | ||
|
|
dce2bc6326 | ||
|
|
90e5d7f6f0 | ||
|
|
71d4e0f9e6 | ||
|
|
580b4534e0 | ||
|
|
fc835e09c6 | ||
|
|
c6e782a637 | ||
|
|
1ddfbfc833 | ||
|
|
dbf637fe0f | ||
|
|
110e129622 | ||
|
|
677e9d1b54 | ||
|
|
ad2adb905e | ||
|
|
5e9de7ac14 | ||
|
|
c63fb8380c | ||
|
|
2d39acc224 | ||
|
|
e68358e05f | ||
|
|
a96f10edf0 | ||
|
|
f03d94935b | ||
|
|
9c1320cd95 | ||
|
|
4f2ae588a5 | ||
|
|
eff51034b7 | ||
|
|
18832dc448 | ||
|
|
5262ae8907 | ||
|
|
7f251679fa | ||
|
|
5f5920b427 | ||
|
|
65a16d058a | ||
|
|
a73484d23a | ||
|
|
47887a20c6 | ||
|
|
9465c6f198 | ||
|
|
c09872c8c2 | ||
|
|
b0501cc80d | ||
|
|
f0792db6b3 | ||
|
|
e1514c901b | ||
|
|
e90c5003ae | ||
|
|
92a0d5d52c | ||
|
|
8912cb5323 | ||
|
|
d008c19149 | ||
|
|
e844b6444a | ||
|
|
02606cdce2 | ||
|
|
0081720f0e | ||
|
|
cca1e92868 | ||
|
|
69b774a074 | ||
|
|
5656b03fb4 | ||
|
|
02d0dcbb7f | ||
|
|
7b2e06df12 | ||
|
|
4259ae63d7 | ||
|
|
d7b11803bc | ||
|
|
fed298a6d4 | ||
|
|
51832385b1 | ||
|
|
462303fca0 | ||
|
|
4405703e42 | ||
|
|
23e398e121 | ||
|
|
b17bb89d04 | ||
|
|
5590cebf8f | ||
|
|
1fa043f09d | ||
|
|
3bd0af1451 | ||
|
|
1545962745 | ||
|
|
d032571681 | ||
|
|
33fb0acc7e | ||
|
|
1ec68a91e2 | ||
|
|
a23c113a46 | ||
|
|
371ae2c0a5 | ||
|
|
8f8f6ffa27 | ||
|
|
475fe0d91f | ||
|
|
3d74e60d03 | ||
|
|
83ac83bb28 | ||
|
|
8478fb857c | ||
|
|
7074bdaa9f | ||
|
|
58164294cc | ||
|
|
7c0e9df156 | ||
|
|
bd62212ecb | ||
|
|
2292039b42 | ||
|
|
73f8da8d5a | ||
|
|
e51dbe0ca7 | ||
|
|
482a375e31 | ||
|
|
689c5ce455 | ||
|
|
734a020ecc | ||
|
|
44d537f78c | ||
|
|
b4c60eb910 | ||
|
|
e120b32375 | ||
|
|
de54966d30 | ||
|
|
39a6302c18 | ||
|
|
05ceeea4b0 | ||
|
|
9f8e3373a8 | ||
|
|
42521cbae4 | ||
|
|
b23c35197e | ||
|
|
70f28d9ac4 | ||
|
|
912d73d98a | ||
|
|
2a720fce6f | ||
|
|
e4534c359f | ||
|
|
b91bec15f2 | ||
|
|
67ad5cacb7 | ||
|
|
b4a739476a | ||
|
|
a7bf2085db | ||
|
|
c3802cf48b | ||
|
|
54711c4491 | ||
|
|
fcb52a69c0 | ||
|
|
1b632f9754 | ||
|
|
73d7a0ecdc | ||
|
|
08943593b3 | ||
|
|
c949a88f20 | ||
|
|
a49c11f655 | ||
|
|
a66aed4a88 | ||
|
|
0045c953a0 | ||
|
|
fdce41b451 | ||
|
|
4d5e4d0f00 | ||
|
|
82c9b6481e | ||
|
|
053d4dcb18 | ||
|
|
e1b2c442aa | ||
|
|
0ed8ba8ca4 | ||
|
|
f195847c68 | ||
|
|
5beb13b17e | ||
|
|
7d9ec05062 | ||
|
|
fc604a9eaf | ||
|
|
4f3c1ad9b6 | ||
|
|
6d45ed586c | ||
|
|
1afb633b4f | ||
|
|
34d9f9174b | ||
|
|
3b0c208eff | ||
|
|
05022f8db4 | ||
|
|
3336de457a | ||
|
|
10a27bc29c | ||
|
|
542e5d3c2d | ||
|
|
7372617b14 | ||
|
|
89735a130b | ||
|
|
859cf74bd6 | ||
|
|
e2744ab399 | ||
|
|
16bd065098 | ||
|
|
71c52e6dd7 | ||
|
|
a7f8c3ced3 | ||
|
|
f3f0432c65 | ||
|
|
426ba2d150 | ||
|
|
2790099efa | ||
|
|
f6ba8bc95e | ||
|
|
d6181522c0 | ||
|
|
04cf071ca6 | ||
|
|
e4371b5d02 | ||
|
|
52c52b2a0d | ||
|
|
8f40f10575 | ||
|
|
fe0f6fcd0b | ||
|
|
31b1ad8bb4 | ||
|
|
373680d854 | ||
|
|
9e3bc80495 | ||
|
|
89405fe003 | ||
|
|
b9ea3865a5 | ||
|
|
b5bd643814 | ||
|
|
52ccaeffd5 | ||
|
|
18136c12fd | ||
|
|
dec3f9e75e | ||
|
|
ccc0ee4d18 | ||
|
|
69e9708080 | ||
|
|
5944ba099a | ||
|
|
ada2718b5e | ||
|
|
1f87bd63e7 | ||
|
|
c0f3259cf6 | ||
|
|
e1d5749a40 | ||
|
|
a8d7eb27d9 | ||
|
|
1eecdf3829 | ||
|
|
be8b345889 | ||
|
|
074da389b3 | ||
|
|
4df2dc09fe | ||
|
|
e8d42ba074 | ||
|
|
c036483680 | ||
|
|
2818584db6 | ||
|
|
37585f760d | ||
|
|
f5477a03a1 | ||
|
|
50388425b2 | ||
|
|
725c59eab0 | ||
|
|
7bf1de29a4 | ||
|
|
d90c3fc7dd | ||
|
|
80785ce072 | ||
|
|
44ea896de8 | ||
|
|
d30cb8a0f0 | ||
|
|
6c7b333b34 | ||
|
|
6d34a00e77 | ||
|
|
1f353e10ce | ||
|
|
4e10f8d1c5 | ||
|
|
a22cd853fc | ||
|
|
354e0d6a87 | ||
|
|
dfabe28645 | ||
|
|
fce230da48 | ||
|
|
055ba9bda6 | ||
|
|
ec19c3b4dd | ||
|
|
37aa526404 | ||
|
|
86c1faa40f | ||
|
|
8dcf15d0f9 | ||
|
|
6835e1e680 | ||
|
|
d8f89b8f67 | ||
|
|
ec28eba781 | ||
|
|
5ef8fff5bc | ||
|
|
4f317b76fa | ||
|
|
61672637dc | ||
|
|
ecf6e8f664 | ||
|
|
4115975320 | ||
|
|
21904a8609 | ||
|
|
10b0a3dabb | ||
|
|
b2091e9aed | ||
|
|
f2cb5bd77c | ||
|
|
19c61c52e6 | ||
|
|
b327359183 | ||
|
|
9e9bb72e17 | ||
|
|
a23907e009 | ||
|
|
ad131f5a2c | ||
|
|
dbeae4ca68 | ||
|
|
0fb0e94848 | ||
|
|
95d2a82d35 | ||
|
|
5bc6eb6774 | ||
|
|
3ba81e9aaa | ||
|
|
329a9b59c1 | ||
|
|
39cccd568e | ||
|
|
19b7f6ad8c | ||
|
|
41c000cf47 | ||
|
|
1b8ea61e87 | ||
|
|
22c26e24b1 | ||
|
|
396045177c | ||
|
|
4538593236 | ||
|
|
8086ef355b | ||
|
|
60d038fe46 | ||
|
|
ff0f4463be | ||
|
|
820571d993 | ||
|
|
e311d3767c | ||
|
|
24d7b80244 | ||
|
|
61f99e4d2e | ||
|
|
d5348bcf49 | ||
|
|
5d31d66365 | ||
|
|
29778a0154 | ||
|
|
165c0a5866 | ||
|
|
588323961e | ||
|
|
fd1c0b71c5 | ||
|
|
54fbdcadf9 | ||
|
|
69a30d0cf0 | ||
|
|
b8f9b44f38 | ||
|
|
cbf17d4eb5 | ||
|
|
327e025262 | ||
|
|
6b1e944bba | ||
|
|
668ed4d61b | ||
|
|
312c0584ed | ||
|
|
110d3acb58 | ||
|
|
ddbc60283b | ||
|
|
471bcecfd6 | ||
|
|
0245791b13 | ||
|
|
4794396ce8 | ||
|
|
c7088779d6 | ||
|
|
672905da12 | ||
|
|
47172b13be | ||
|
|
3668a10af6 | ||
|
|
a4e294c03f | ||
|
|
3fd6f4003f | ||
|
|
3eaf5cd530 | ||
|
|
c344fd8ca4 | ||
|
|
09639ca294 | ||
|
|
a81b6dca83 | ||
|
|
b74aefb08f | ||
|
|
fffc0c3add | ||
|
|
757f90aa7a | ||
|
|
022f9eb551 | ||
|
|
6e7b82cfcb | ||
|
|
b5fb24b360 | ||
|
|
b77345222c | ||
|
|
793e81406e | ||
|
|
cef1ec95d2 | ||
|
|
7e1b3c552b | ||
|
|
69736a63b6 | ||
|
|
fb4a9f9056 | ||
|
|
387d89d3af | ||
|
|
65d9ca9d39 | ||
|
|
8c842af4ba | ||
|
|
4faf9262c9 | ||
|
|
be7724c67d | ||
|
|
48d26347f7 | ||
|
|
bdb01ec8b5 | ||
|
|
9047815799 | ||
|
|
05bd94a2cc | ||
|
|
c9f7da84d0 | ||
|
|
bcc124e86a | ||
|
|
48d2733403 | ||
|
|
31fc6e4e56 | ||
|
|
fcdeef0146 | ||
|
|
1cd524c0cc | ||
|
|
0f746917a7 | ||
|
|
a2228d0169 | ||
|
|
e8a679d34b | ||
|
|
1912a42091 | ||
|
|
ca81f96635 | ||
|
|
eb3b8c4b31 | ||
|
|
6740d6d60b | ||
|
|
c46c35b248 | ||
|
|
0b2dcec4bc | ||
|
|
f8e2a4aff4 | ||
|
|
7256db8c4e | ||
|
|
b14d5d9bee | ||
|
|
12e15c3e4b | ||
|
|
51911bf272 | ||
|
|
6dc8061401 | ||
|
|
b8fa4f8797 | ||
|
|
cc0bea7f45 | ||
|
|
4e9124b244 | ||
|
|
f0eabef7b0 | ||
|
|
23e5557958 | ||
|
|
b1d02afa85 | ||
|
|
2edc380f47 | ||
|
|
cea8295c09 | ||
|
|
244bfc993a | ||
|
|
3a272a4493 | ||
|
|
a3300db770 | ||
|
|
b0394ce261 | ||
|
|
3123089790 | ||
|
|
f13cf66676 | ||
|
|
0c8c4d87fb | ||
|
|
066088fdeb | ||
|
|
cf641e41c7 | ||
|
|
5b48322e1b | ||
|
|
9d3f680d58 | ||
|
|
bed28d57e6 | ||
|
|
2538525103 | ||
|
|
6ed798db8c | ||
|
|
8e9d966829 | ||
|
|
be16640f92 | ||
|
|
0e1376dd2e | ||
|
|
0494575aa7 | ||
|
|
bed57534e0 | ||
|
|
1862d631d1 | ||
|
|
c977ce5690 | ||
|
|
84df377516 | ||
|
|
4d9a284f6e | ||
|
|
da7ad8b44a | ||
|
|
4164046323 | ||
|
|
72e743dfd1 | ||
|
|
7eb7edaf0a | ||
|
|
49368aaf76 | ||
|
|
b8c07a966f | ||
|
|
c6bcc0e3aa | ||
|
|
7719339f23 | ||
|
|
8ad64722ed | ||
|
|
611f8b8865 | ||
|
|
38bdc173e8 | ||
|
|
52244325d9 | ||
|
|
3fd3d99b8c | ||
|
|
d4ee5e91a2 | ||
|
|
c2ad2d7238 | ||
|
|
892e195f0e | ||
|
|
c5b1bed7dc | ||
|
|
0e388d7aa7 | ||
|
|
c3a0dbbe48 | ||
|
|
8b95b3ffc7 | ||
|
|
42b78461cd | ||
|
|
9190a41ca5 | ||
|
|
28a7251319 | ||
|
|
20565866ef | ||
|
|
246f10aee5 | ||
|
|
960017280d | ||
|
|
7218aaf52e | ||
|
|
62050cc7b6 | ||
|
|
f88a14ac0a | ||
|
|
9286761c30 | ||
|
|
07c3273247 | ||
|
|
eb8fe77582 | ||
|
|
b68ba0bff6 | ||
|
|
696657c09e | ||
|
|
12bea9b60a | ||
|
|
9334e9552f | ||
|
|
a43b04a98b | ||
|
|
f359ff995d | ||
|
|
9185d2646b | ||
|
|
33e61c762c | ||
|
|
e342e646ff | ||
|
|
ed163a80e0 | ||
|
|
b390df08b5 | ||
|
|
f0b3b9f7f4 | ||
|
|
a67d732507 | ||
|
|
ca0ebe0d75 | ||
|
|
94d113cbe0 | ||
|
|
25c3aeaa5f | ||
|
|
736d5a00b7 | ||
|
|
f1627b214c | ||
|
|
d9265ec7ea | ||
|
|
663e871bed | ||
|
|
5c5eaddef7 | ||
|
|
edaec4f1ae | ||
|
|
6d19acaa6c | ||
|
|
d29a619fbf | ||
|
|
b17808dd91 | ||
|
|
c5321a3667 | ||
|
|
8836691510 | ||
|
|
6568f6525d | ||
|
|
473fc27b49 | ||
|
|
74aeb55acb | ||
|
|
8efcf0529f | ||
|
|
06071c2f9c | ||
|
|
5eb4eca487 | ||
|
|
33f6153e12 | ||
|
|
df3283f526 | ||
|
|
b5901a2819 | ||
|
|
6d5f1402fe | ||
|
|
65e3782b2e | ||
|
|
135981dd30 | ||
|
|
fe5cf2d922 | ||
|
|
e15425cc2e | ||
|
|
c3cb0a4e33 | ||
|
|
cc32976bdd | ||
|
|
bc08318716 | ||
|
|
ee1ab30c2c | ||
|
|
7fa1a66f7e | ||
|
|
946bf37406 | ||
|
|
8706f6931a | ||
|
|
f551674860 | ||
|
|
d90fe0ef07 | ||
|
|
bf979fa3b3 | ||
|
|
b3b88891e9 | ||
|
|
01c5de60dc | ||
|
|
47b8fe5022 | ||
|
|
324b37b875 | ||
|
|
76e7e192d8 | ||
|
|
f9f3c4d923 | ||
|
|
a476476bd1 | ||
|
|
82a60a884a | ||
|
|
f17727de18 | ||
|
|
f1f33c79f4 | ||
|
|
d52eaafdbb | ||
|
|
e7a3e50ed1 | ||
|
|
2e09a87baa | ||
|
|
b92ae7e47e | ||
|
|
f98446e139 | ||
|
|
57a48dadaa | ||
|
|
c65ec68e46 | ||
|
|
d6559be3fc | ||
|
|
6fbf67f9a9 | ||
|
|
59df5b24fe | ||
|
|
3e1544294b | ||
|
|
a12c398816 | ||
|
|
0bd3e28348 | ||
|
|
ad4e39c088 | ||
|
|
2668d96e6a | ||
|
|
357c496aad | ||
|
|
22a513ba22 | ||
|
|
e6dd1119be | ||
|
|
2dbe454e04 | ||
|
|
e3a59b76eb | ||
|
|
01008acfcd | ||
|
|
b67a162d3f | ||
|
|
8bfde9fbaf | ||
|
|
1fdecf8def | ||
|
|
1141d4b833 | ||
|
|
cdac92ca7b | ||
|
|
2a57c260cc | ||
|
|
f41e29ab3a | ||
|
|
8f10624073 | ||
|
|
eb1f8be11e | ||
|
|
3333501ab9 | ||
|
|
0f40820315 | ||
|
|
5f1a839620 | ||
|
|
b9bb1c775d | ||
|
|
1059b7376b | ||
|
|
f38ab4a9ce | ||
|
|
9e7450c012 | ||
|
|
99a3e360fe | ||
|
|
d45f8f78d6 | ||
|
|
648af61116 | ||
|
|
eebf1b89b1 | ||
|
|
f8094bb624 | ||
|
|
ed13e0d2c2 | ||
|
|
aa830589b4 | ||
|
|
999a2bd929 | ||
|
|
d69ee98450 | ||
|
|
f6712c24ad | ||
|
|
89d2772194 | ||
|
|
03352142b6 | ||
|
|
73a51e0c00 | ||
|
|
2e26f8caa6 | ||
|
|
f9bcce9e43 | ||
|
|
2ecc877ba8 | ||
|
|
3f8a3c69e3 | ||
|
|
67c37a0984 | ||
|
|
a58a55d00d | ||
|
|
06d51dd0b8 | ||
|
|
d5db028f57 | ||
|
|
fcb85ff4be | ||
|
|
3695b4363d | ||
|
|
cb11e6437c | ||
|
|
5127bd11ce | ||
|
|
91f90aefa1 | ||
|
|
0a067bce36 | ||
|
|
f0aba433bf | ||
|
|
f06467a0e3 | ||
|
|
68bcd3c710 | ||
|
|
a645733cc5 | ||
|
|
49fe5baf94 | ||
|
|
411ee55653 | ||
|
|
e351ce7411 | ||
|
|
f33e585a71 | ||
|
|
77f3096e0d | ||
|
|
9a5b18c4e6 | ||
|
|
0c7112869a | ||
|
|
f66a4d71ea | ||
|
|
9b0ab878df | ||
|
|
d30b90dfd0 | ||
|
|
efd28f8c27 | ||
|
|
e05e722387 | ||
|
|
748e81956d | ||
|
|
c9a41febce | ||
|
|
18e244b756 | ||
|
|
47676139a3 | ||
|
|
1ed933b7ad | ||
|
|
f6a343ccd6 | ||
|
|
dd6cdc22e5 | ||
|
|
f70f4348b3 | ||
|
|
ec7f801929 | ||
|
|
0f8aca382e | ||
|
|
0270f77eaa | ||
|
|
dcba71ada4 | ||
|
|
6080f76a9c | ||
|
|
e7349161f3 | ||
|
|
2e2907ea09 | ||
|
|
25e84b2a6c | ||
|
|
5efd424172 | ||
|
|
2672502c07 | ||
|
|
83440cc3d9 | ||
|
|
8e5f93be1c | ||
|
|
c1afc07955 | ||
|
|
4a83e14878 | ||
|
|
832320abc6 | ||
|
|
70c237da72 | ||
|
|
edfcc5c023 | ||
|
|
0668debec6 | ||
|
|
02d6463faa | ||
|
|
1fdb85234c | ||
|
|
44b7dd1808 | ||
|
|
e983ee3101 | ||
|
|
75e7e81c05 | ||
|
|
31ce3b9c08 | ||
|
|
ed93c50fef | ||
|
|
4845660eb5 | ||
|
|
c7919210a2 | ||
|
|
9491418f3b | ||
|
|
e8de403286 | ||
|
|
dfb625377b | ||
|
|
2c0f2a8be6 | ||
|
|
787d3cb3e9 | ||
|
|
96ca17d26c | ||
|
|
3dd0f7f2c3 | ||
|
|
10ba0cf976 | ||
|
|
276c15cc23 | ||
|
|
2584b848ad | ||
|
|
6471efed5f | ||
|
|
5b7d7ad65d | ||
|
|
712851a8a5 | ||
|
|
63d291cb47 | ||
|
|
f825c92111 | ||
|
|
419eb2ea41 | ||
|
|
89b58dd64e | ||
|
|
6bc5f81440 | ||
|
|
424f4b7b5e | ||
|
|
9271a1caac | ||
|
|
0ee4df03f9 | ||
|
|
8ac713ce32 | ||
|
|
76b2489fe9 | ||
|
|
6786095154 | ||
|
|
2c5793ef37 | ||
|
|
d483f25b96 | ||
|
|
7118368979 | ||
|
|
59256c2e80 | ||
|
|
1fb8a0db1e | ||
|
|
07d0c8e8fa | ||
|
|
98452ead17 | ||
|
|
d8c9f40377 | ||
|
|
8148d5eec6 | ||
|
|
4c429ad604 | ||
|
|
a9c52de8d5 | ||
|
|
f648aa1f91 | ||
|
|
eaba388bdd | ||
|
|
73e6afcbc6 | ||
|
|
8c3b72adf2 | ||
|
|
ae18ff4262 | ||
|
|
1adc8af543 | ||
|
|
7413df6f1e | ||
|
|
bda8559190 | ||
|
|
b74612fa41 | ||
|
|
22e0c20dcd | ||
|
|
08f92e1100 | ||
|
|
bb12ece46e | ||
|
|
0065438305 | ||
|
|
7f115c1b3e | ||
|
|
4e0114ab0d | ||
|
|
0ef64fa4bd | ||
|
|
84dbc17c22 | ||
|
|
16e16e356d | ||
|
|
978ee885c4 | ||
|
|
850d43df63 | ||
|
|
fc109fd1b1 | ||
|
|
9aefc55534 | ||
|
|
2829947b93 | ||
|
|
0c2af89a1c | ||
|
|
14c2dc9624 | ||
|
|
4f35d710a6 | ||
|
|
fdb5e018e5 | ||
|
|
6001fde25c | ||
|
|
ae63c0adaf | ||
|
|
ad1539c8f6 | ||
|
|
634a0c8cd0 | ||
|
|
773f9a0c63 | ||
|
|
e4e320e9e3 | ||
|
|
3b4b400e6b | ||
|
|
a950be2d95 | ||
|
|
ba6f5ab984 | ||
|
|
f3a5e3f5ed | ||
|
|
e685e621f3 | ||
|
|
2cd2be9b67 | ||
|
|
e73d9e8a03 | ||
|
|
476f74a604 | ||
|
|
ab0d1d99e6 | ||
|
|
d5680ffd5d | ||
|
|
3c091a88d4 | ||
|
|
49b70b33de | ||
|
|
c5ff2716fb | ||
|
|
400fdf0896 | ||
|
|
cbb8c7323c | ||
|
|
60e79f8f77 | ||
|
|
0e829d739a | ||
|
|
62abb274e0 | ||
|
|
e4028785de | ||
|
|
2bb44bcb76 | ||
|
|
684599f81b | ||
|
|
b56d28f5df | ||
|
|
02b9ac04c8 | ||
|
|
2fc283990a | ||
|
|
abb652ebd5 | ||
|
|
55786cb7f7 | ||
|
|
447a575f4f | ||
|
|
49280a8617 | ||
|
|
ff78a9cc35 | ||
|
|
3fea5c9c8c | ||
|
|
aea63cad52 | ||
|
|
800abe9920 | ||
|
|
dd6069e41a | ||
|
|
90d31aeff0 | ||
|
|
4d9a327b1f | ||
|
|
06a97ef076 | ||
|
|
76c2477387 | ||
|
|
bc4dac9cad | ||
|
|
36e3d6c18a | ||
|
|
edfd84a8e3 | ||
|
|
fb20cf6069 | ||
|
|
abbe47f6b9 | ||
|
|
f84d250134 | ||
|
|
3ffb4b8990 | ||
|
|
f70cfabede | ||
|
|
3a81783d77 | ||
|
|
237a4a90ff | ||
|
|
99c7dfc98d | ||
|
|
48aba34370 | ||
|
|
29cca36f2c | ||
|
|
0f5819f5c2 | ||
|
|
373772de2d | ||
|
|
7f5bbe8b5f | ||
|
|
daee57167b | ||
|
|
03467196b9 | ||
|
|
d3f3531cdb | ||
|
|
883b694592 | ||
|
|
6c89d66af9 | ||
|
|
fb0a76b418 | ||
|
|
64f77fca5b | ||
|
|
b1fca2c5be | ||
|
|
108d705f09 | ||
|
|
a77242e66c | ||
|
|
8b153113ff | ||
|
|
6d0ec37135 | ||
|
|
603dadff35 | ||
|
|
1a4ef3d9c1 | ||
|
|
788468054a | ||
|
|
bdb44c6dce | ||
|
|
251086f9e9 | ||
|
|
b22aa62046 | ||
|
|
c6e4b60424 | ||
|
|
28d985aaf1 | ||
|
|
2397cbf80b | ||
|
|
a13d9daae3 | ||
|
|
c23870e020 | ||
|
|
dd2e29dd40 | ||
|
|
74b5700573 | ||
|
|
ba6abea6d8 | ||
|
|
33b231d512 | ||
|
|
61f0b67a92 | ||
|
|
4b679be310 | ||
|
|
a969795677 | ||
|
|
4f4e7e80fc | ||
|
|
2f72cbb627 | ||
|
|
a460e169ab | ||
|
|
27ce4d6a0d | ||
|
|
ac86f8aded | ||
|
|
93eca239cb | ||
|
|
dc5949d497 | ||
|
|
5e24f6b044 | ||
|
|
0cd31e0545 | ||
|
|
d4dc4b9d0a | ||
|
|
8c6fe40de1 | ||
|
|
e4dc4bae30 | ||
|
|
d99c21f4d7 | ||
|
|
8ef549de80 | ||
|
|
1b57758102 | ||
|
|
553fe30662 | ||
|
|
b6138afe8b | ||
|
|
64d64fe6fe | ||
|
|
f29b356b74 | ||
|
|
b5621d1ffd | ||
|
|
66f0da934d | ||
|
|
13a90fdd57 | ||
|
|
63966887ab | ||
|
|
2865b913af | ||
|
|
893ebdcc9b | ||
|
|
e82437a2db | ||
|
|
c44f08a7ac | ||
|
|
922545096a | ||
|
|
71f00576e0 | ||
|
|
cd11735e72 | ||
|
|
e1a8081f4b | ||
|
|
47265bb8d3 | ||
|
|
15d2fe7cfc | ||
|
|
a7850c4fd7 | ||
|
|
d87e64534d | ||
|
|
fc87aaf0db |
51
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
51
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
---
|
||||
name: 报告Bug
|
||||
about: 报告KnowStreaming的相关Bug
|
||||
title: ''
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
- [ ] 我已经在 [issues](https://github.com/didi/KnowStreaming/issues) 搜索过相关问题了,并没有重复的。
|
||||
|
||||
你是否希望来认领这个Bug。
|
||||
|
||||
「 Y / N 」
|
||||
|
||||
### 环境信息
|
||||
|
||||
* KnowStreaming version : <font size=4 color =red> xxx </font>
|
||||
* Operating System version : <font size=4 color =red> xxx </font>
|
||||
* Java version : <font size=4 color =red> xxx </font>
|
||||
|
||||
|
||||
### 重现该问题的步骤
|
||||
|
||||
1. xxx
|
||||
|
||||
|
||||
|
||||
2. xxx
|
||||
|
||||
|
||||
3. xxx
|
||||
|
||||
|
||||
|
||||
### 预期结果
|
||||
|
||||
<!-- 写下应该出现的预期结果?-->
|
||||
|
||||
### 实际结果
|
||||
|
||||
<!-- 实际发生了什么? -->
|
||||
|
||||
|
||||
---
|
||||
|
||||
如果有异常,请附上异常Trace:
|
||||
|
||||
```
|
||||
Just put your stack trace here!
|
||||
```
|
||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: 讨论问题
|
||||
url: https://github.com/didi/KnowStreaming/discussions/new
|
||||
about: 发起问题、讨论 等等
|
||||
- name: KnowStreaming官网
|
||||
url: https://knowstreaming.com/
|
||||
about: KnowStreaming website
|
||||
26
.github/ISSUE_TEMPLATE/detail_optimizing.md
vendored
Normal file
26
.github/ISSUE_TEMPLATE/detail_optimizing.md
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
---
|
||||
name: 优化建议
|
||||
about: 相关功能优化建议
|
||||
title: ''
|
||||
labels: Optimization Suggestions
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
- [ ] 我已经在 [issues](https://github.com/didi/KnowStreaming/issues) 搜索过相关问题了,并没有重复的。
|
||||
|
||||
你是否希望来认领这个优化建议。
|
||||
|
||||
「 Y / N 」
|
||||
|
||||
### 环境信息
|
||||
|
||||
* KnowStreaming version : <font size=4 color =red> xxx </font>
|
||||
* Operating System version : <font size=4 color =red> xxx </font>
|
||||
* Java version : <font size=4 color =red> xxx </font>
|
||||
|
||||
### 需要优化的功能点
|
||||
|
||||
|
||||
### 建议如何优化
|
||||
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: 提议新功能/需求
|
||||
about: 给KnowStreaming提一个功能需求
|
||||
title: ''
|
||||
labels: feature
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
- [ ] 我在 [issues](https://github.com/didi/KnowStreaming/issues) 中并未搜索到与此相关的功能需求。
|
||||
- [ ] 我在 [release note](https://github.com/didi/KnowStreaming/releases) 已经发布的版本中并没有搜到相关功能.
|
||||
|
||||
你是否希望来认领这个Feature。
|
||||
|
||||
「 Y / N 」
|
||||
|
||||
|
||||
## 这里描述需求
|
||||
<!--请尽可能的描述清楚您的需求 -->
|
||||
|
||||
12
.github/ISSUE_TEMPLATE/question.md
vendored
Normal file
12
.github/ISSUE_TEMPLATE/question.md
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
---
|
||||
name: 提个问题
|
||||
about: 问KnowStreaming相关问题
|
||||
title: ''
|
||||
labels: question
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
- [ ] 我已经在 [issues](https://github.com/didi/KnowStreaming/issues) 搜索过相关问题了,并没有重复的。
|
||||
|
||||
## 在这里提出你的问题
|
||||
23
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
23
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
请不要在没有先创建Issue的情况下创建Pull Request。
|
||||
|
||||
## 变更的目的是什么
|
||||
|
||||
XXXXX
|
||||
|
||||
## 简短的更新日志
|
||||
|
||||
XX
|
||||
|
||||
## 验证这一变化
|
||||
|
||||
XXXX
|
||||
|
||||
请遵循此清单,以帮助我们快速轻松地整合您的贡献:
|
||||
|
||||
* [ ] 一个 PR(Pull Request的简写)只解决一个问题,禁止一个 PR 解决多个问题;
|
||||
* [ ] 确保 PR 有对应的 Issue(通常在您开始处理之前创建),除非是书写错误之类的琐碎更改不需要 Issue ;
|
||||
* [ ] 格式化 PR 及 Commit-Log 的标题及内容,例如 #861 。PS:Commit-Log 需要在 Git Commit 代码时进行填写,在 GitHub 上修改不了;
|
||||
* [ ] 编写足够详细的 PR 描述,以了解 PR 的作用、方式和原因;
|
||||
* [ ] 编写必要的单元测试来验证您的逻辑更正。如果提交了新功能或重大更改,请记住在 test 模块中添加 integration-test;
|
||||
* [ ] 确保编译通过,集成测试通过;
|
||||
|
||||
43
.github/workflows/ci_build.yml
vendored
Normal file
43
.github/workflows/ci_build.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
name: KnowStreaming Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "*" ]
|
||||
pull_request:
|
||||
branches: [ "*" ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '11'
|
||||
distribution: 'temurin'
|
||||
cache: maven
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '12.22.12'
|
||||
|
||||
- name: Build With Maven
|
||||
run: mvn -Prelease-package -Dmaven.test.skip=true clean install -U
|
||||
|
||||
- name: Get KnowStreaming Version
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
version=`mvn -Dexec.executable='echo' -Dexec.args='${project.version}' --non-recursive exec:exec -q`
|
||||
echo "VERSION=${version}" >> $GITHUB_ENV
|
||||
|
||||
- name: Upload Binary Package
|
||||
if: ${{ success() }}
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: KnowStreaming-${{ env.VERSION }}.tar.gz
|
||||
path: km-dist/target/KnowStreaming-${{ env.VERSION }}.tar.gz
|
||||
23
.gitignore
vendored
23
.gitignore
vendored
@@ -5,7 +5,6 @@
|
||||
|
||||
## Directory-based project format:
|
||||
.idea/
|
||||
.gradle/
|
||||
# if you remove the above rule, at least ignore the following:
|
||||
|
||||
# User-specific stuff:
|
||||
@@ -28,12 +27,11 @@
|
||||
## File-based project format:
|
||||
*.ipr
|
||||
*.iws
|
||||
*.iml
|
||||
|
||||
## Plugin-specific files:
|
||||
|
||||
# IntelliJ
|
||||
/out/
|
||||
build/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
@@ -58,6 +56,7 @@ fabric.properties
|
||||
*.jar
|
||||
*.war
|
||||
*.ear
|
||||
*.tar.gz
|
||||
|
||||
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
|
||||
hs_err_pid*
|
||||
@@ -101,13 +100,17 @@ target/
|
||||
*/velocity.log*
|
||||
*/*.log
|
||||
*/*.log.*
|
||||
web/node_modules/
|
||||
web/node_modules/*
|
||||
node_modules/
|
||||
node_modules/*
|
||||
workspace.xml
|
||||
/output/*
|
||||
.gitversion
|
||||
*/node_modules/*
|
||||
*/templates/*
|
||||
*/out/*
|
||||
*/dist/*
|
||||
.DS_Store
|
||||
out/*
|
||||
dist/
|
||||
dist/*
|
||||
km-rest/src/main/resources/templates/
|
||||
*dependency-reduced-pom*
|
||||
#filter flattened xml
|
||||
*/.flattened-pom.xml
|
||||
.flattened-pom.xml
|
||||
*/*/.flattened-pom.xml
|
||||
@@ -1,40 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
SERVICE_PATH="/home/xiaoju/${APPNAME}"
|
||||
|
||||
#nginx logs ln
|
||||
if [ ! -L /home/xiaoju/nginx/logs ]; then
|
||||
rm -rf /home/xiaoju/nginx/logs
|
||||
mkdir -p /home/xiaoju/data1/nginx-logs && \
|
||||
ln -s /home/xiaoju/data1/nginx-logs /home/xiaoju/nginx/logs
|
||||
fi
|
||||
|
||||
if [ -f "/home/xiaoju/$APPNAME/.deploy/service.json" ]; then
|
||||
# cp service.json for nginx metric collect.
|
||||
su xiaoju -c "mkdir -p /home/xiaoju/nginx/.deploy && cp /home/xiaoju/$APPNAME/.deploy/service.json /home/xiaoju/nginx/.deploy"
|
||||
fi
|
||||
|
||||
#tomcat logs ln
|
||||
if [ ! -L /home/xiaoju/tomcat/logs ]; then
|
||||
rm -rf /home/xiaoju/tomcat/logs
|
||||
mkdir -p /home/xiaoju/data1/tomcat-logs && \
|
||||
ln -s /home/xiaoju/data1/tomcat-logs /home/xiaoju/tomcat/logs
|
||||
fi
|
||||
|
||||
#application logs ln
|
||||
if [ ! -L /home/xiaoju/${APPNAME}/logs ]; then
|
||||
mkdir -p /home/xiaoju/data1/${APPNAME}-logs && \
|
||||
ln -s /home/xiaoju/data1/${APPNAME}-logs /home/xiaoju/${APPNAME}/logs
|
||||
fi
|
||||
|
||||
if [ ! -L /data1 ]; then
|
||||
ln -s /home/xiaoju/data1 /data1
|
||||
fi
|
||||
|
||||
chown -R xiaoju.xiaoju /home/xiaoju/data1/
|
||||
chown -R xiaoju.xiaoju /data1/
|
||||
|
||||
mkdir -p '/etc/odin-super-agent/'; echo 'consul-client' >> /etc/odin-super-agent/agents.deny; /home/odin/super-agent/data/install/consul-client/current/control stop
|
||||
su xiaoju -c "cd $SERVICE_PATH && bash -x ./control.sh start"
|
||||
|
||||
/usr/bin/monit -c /etc/monitrc
|
||||
@@ -1,8 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
SERVICE_PATH="/home/xiaoju/${APPNAME}"
|
||||
|
||||
/usr/bin/monit stop all
|
||||
|
||||
su xiaoju -c "cd $SERVICE_PATH && ./control.sh stop"
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
FROM registry.xiaojukeji.com/didionline/bigdatadatabus-didi-jdk11-tomcat-nginx-centos7:stable
|
||||
MAINTAINER zhuyefeng <zhuyefeng@didichuxing.com>
|
||||
|
||||
ENV JAVA_HOME /usr/local/jdk-11.0.2
|
||||
# TODO 设置模块名字
|
||||
ENV APPNAME service-discovery
|
||||
|
||||
RUN mkdir -p /etc/container/prestop
|
||||
ADD ./APP_META/nginx/conf/nginx.conf /home/xiaoju/nginx/conf/
|
||||
ADD ./APP_META/monit/monitrc /etc/monitrc
|
||||
#ADD ./APP_META/monit/nginx.cfg /etc/monit.d/
|
||||
ADD ./APP_META/990-startapp.required.sh /etc/container/init/990-startapp.required.sh
|
||||
ADD ./APP_META/990-stopapp.sh /etc/container/prestop/990-stopapp.sh
|
||||
|
||||
RUN mkdir -p /home/xiaoju/${APPNAME} && \
|
||||
# TODO 如果tomcat容器应用需要下面这步
|
||||
#mkdir -p /home/xiaoju/tomcat/webapps && \
|
||||
chmod 0700 /etc/monitrc && \
|
||||
chmod a+x /etc/container/init/990-startapp.required.sh && \
|
||||
chmod a+x /etc/container/prestop/990-stopapp.sh
|
||||
|
||||
COPY ./home-xiaoju-${APPNAME} /home/xiaoju/${APPNAME}
|
||||
|
||||
# TODO 如果tomcat容器应用需要下面这步
|
||||
#RUN ln -s /home/xiaoju/${APPNAME} /home/xiaoju/tomcat/webapps/
|
||||
@@ -1,13 +0,0 @@
|
||||
set daemon 10 # check services at 10 seconds intervals
|
||||
set log syslog
|
||||
|
||||
set httpd port 2812 and
|
||||
use address localhost # only accept connection from localhost
|
||||
allow localhost # allow localhost to connect to the server and
|
||||
allow admin:monit # require user 'admin' with password 'monit'
|
||||
#with ssl { # enable SSL/TLS and set path to server certificate
|
||||
# pemfile: /etc/ssl/certs/monit.pem
|
||||
#}
|
||||
|
||||
include /etc/monit.d/*
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
#user xiaoju xiaoju;
|
||||
|
||||
worker_rlimit_nofile 204800;
|
||||
worker_processes 4;
|
||||
error_log /home/xiaoju/nginx/logs/error.log;
|
||||
pid /home/xiaoju/nginx/run/nginx.pid;
|
||||
|
||||
# Load dynamic modules. See /usr/share/nginx/README.dynamic.
|
||||
include /home/xiaoju/nginx/modules/*.conf;
|
||||
|
||||
|
||||
events {
|
||||
use epoll;
|
||||
worker_connections 204800;
|
||||
|
||||
accept_mutex on;
|
||||
accept_mutex_delay 5ms;
|
||||
multi_accept on;
|
||||
}
|
||||
|
||||
|
||||
http {
|
||||
include mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
server_names_hash_bucket_size 128;
|
||||
#server_tag off;
|
||||
#server_info off;
|
||||
server_tokens off;
|
||||
|
||||
sendfile on;
|
||||
tcp_nopush on;
|
||||
tcp_nodelay on;
|
||||
|
||||
fastcgi_connect_timeout 5;
|
||||
fastcgi_send_timeout 10;
|
||||
fastcgi_read_timeout 10;
|
||||
fastcgi_buffer_size 64k;
|
||||
fastcgi_buffers 4 64k;
|
||||
fastcgi_busy_buffers_size 128k;
|
||||
fastcgi_temp_file_write_size 128k;
|
||||
|
||||
keepalive_timeout 60;
|
||||
keepalive_requests 1024;
|
||||
client_header_buffer_size 4k;
|
||||
large_client_header_buffers 4 32k;
|
||||
client_max_body_size 10m;
|
||||
|
||||
client_body_buffer_size 512k;
|
||||
client_body_timeout 30;
|
||||
client_header_timeout 10;
|
||||
send_timeout 240;
|
||||
|
||||
proxy_connect_timeout 10s;
|
||||
proxy_send_timeout 15s;
|
||||
proxy_read_timeout 15s;
|
||||
proxy_buffers 64 8k;
|
||||
proxy_busy_buffers_size 128k;
|
||||
proxy_temp_file_write_size 64k;
|
||||
proxy_redirect off;
|
||||
#proxy_upstream_tries 2;
|
||||
proxy_next_upstream error invalid_header timeout http_502 http_504;
|
||||
|
||||
gzip on;
|
||||
gzip_min_length 1k;
|
||||
gzip_buffers 4 16k;
|
||||
gzip_http_version 1.0;
|
||||
gzip_comp_level 2;
|
||||
gzip_types text/plain application/x-javascript text/css text/xml application/xml+css application/json text/javascript;
|
||||
gzip_vary on;
|
||||
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Real-Port $remote_port;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_pass_header Server;
|
||||
|
||||
#operationid on;
|
||||
#operationid_header didi-header-rid;
|
||||
#operationid_eth eth0;
|
||||
#proxy_set_header didi-header-rid $operationid;
|
||||
|
||||
log_format main '$server_addr\t$host\t'
|
||||
'$remote_addr\t$http_x_forwarded_for\t'
|
||||
'$time_local\t'
|
||||
'$scheme\t$request\t'
|
||||
'$status\t$upstream_status\t'
|
||||
'$request_time\t$upstream_addr\t$upstream_response_time\t'
|
||||
'$request_length\t$bytes_sent\t'
|
||||
'$http_referer\t$http_cookie\t$http_user_agent\t'
|
||||
'$limit_rate\t$http_didi_header_omgid\t$remote_port';
|
||||
|
||||
|
||||
set_real_ip_from 10.0.0.0/8;
|
||||
set_real_ip_from 100.64.0.0/10;
|
||||
real_ip_header X-Real-IP;
|
||||
|
||||
server {
|
||||
listen 8080 backlog=4096;
|
||||
server_name localhost;
|
||||
access_log logs/access.log main;
|
||||
|
||||
location = /status.do {
|
||||
access_log off;
|
||||
root /home/xiaoju/nginx/html;
|
||||
}
|
||||
|
||||
location / {
|
||||
root html;
|
||||
index index.html index.htm;
|
||||
if ( $args !~ '^\?' ){
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
}
|
||||
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root html;
|
||||
}
|
||||
}
|
||||
|
||||
include conf.d/*.conf;
|
||||
# include servers/*.conf;
|
||||
# include server_conf/*.conf;
|
||||
# include upstream_conf/*.conf;
|
||||
}
|
||||
74
CODE_OF_CONDUCT.md
Normal file
74
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,74 @@
|
||||
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project, and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
education, socio-economic status, nationality, personal appearance, race,
|
||||
religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at https://knowstreaming.com/support-center . All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
155
CONTRIBUTING.md
155
CONTRIBUTING.md
@@ -1,11 +1,150 @@
|
||||
## Contributing to Kafka
|
||||
|
||||
*Before opening a pull request*, review the [Contributing](https://kafka.apache.org/contributing.html) and [Contributing Code Changes](https://cwiki.apache.org/confluence/display/KAFKA/Contributing+Code+Changes) pages.
|
||||
|
||||
It lists steps that are required before creating a PR.
|
||||
|
||||
When you contribute code, you affirm that the contribution is your original work and that you
|
||||
license the work to the project under the project's open source license. Whether or not you
|
||||
state this explicitly, by submitting any copyrighted material via pull request, email, or
|
||||
other means you agree to license the material under the project's open source license and
|
||||
warrant that you have the legal authority to do so.
|
||||
# 为KnowStreaming做贡献
|
||||
|
||||
|
||||
欢迎👏🏻来到KnowStreaming!本文档是关于如何为KnowStreaming做出贡献的指南。
|
||||
|
||||
如果您发现不正确或遗漏的内容, 请留下意见/建议。
|
||||
|
||||
## 行为守则
|
||||
请务必阅读并遵守我们的 [行为准则](./CODE_OF_CONDUCT.md).
|
||||
|
||||
|
||||
|
||||
## 贡献
|
||||
|
||||
**KnowStreaming** 欢迎任何角色的新参与者,包括 **User** 、**Contributor**、**Committer**、**PMC** 。
|
||||
|
||||
我们鼓励新人积极加入 **KnowStreaming** 项目,从User到Contributor、Committer ,甚至是 PMC 角色。
|
||||
|
||||
为了做到这一点,新人需要积极地为 **KnowStreaming** 项目做出贡献。以下介绍如何对 **KnowStreaming** 进行贡献。
|
||||
|
||||
|
||||
### 创建/打开 Issue
|
||||
|
||||
如果您在文档中发现拼写错误、在代码中**发现错误**或想要**新功能**或想要**提供建议**,您可以在 GitHub 上[创建一个Issue](https://github.com/didi/KnowStreaming/issues/new/choose) 进行报告。
|
||||
|
||||
|
||||
如果您想直接贡献, 您可以选择下面标签的问题。
|
||||
|
||||
- [contribution welcome](https://github.com/didi/KnowStreaming/labels/contribution%20welcome) : 非常需要解决/新增 的Issues
|
||||
- [good first issue](https://github.com/didi/KnowStreaming/labels/good%20first%20issue): 对新人比较友好, 新人可以拿这个Issue来练练手热热身。
|
||||
|
||||
<font color=red ><b> 请注意,任何 PR 都必须与有效issue相关联。否则,PR 将被拒绝。</b></font>
|
||||
|
||||
|
||||
|
||||
### 开始你的贡献
|
||||
|
||||
**分支介绍**
|
||||
|
||||
我们将 `dev`分支作为开发分支, 说明这是一个不稳定的分支。
|
||||
|
||||
此外,我们的分支模型符合 [https://nvie.com/posts/a-successful-git-branching-model/](https://nvie.com/posts/a-successful-git-branching-model/). 我们强烈建议新人在创建PR之前先阅读上述文章。
|
||||
|
||||
|
||||
|
||||
**贡献流程**
|
||||
|
||||
为方便描述,我们这里定义一下2个名词:
|
||||
|
||||
自己Fork出来的仓库是私人仓库, 我们这里称之为 :**分叉仓库**
|
||||
Fork的源项目,我们称之为:**源仓库**
|
||||
|
||||
|
||||
现在,如果您准备好创建PR, 以下是贡献者的工作流程:
|
||||
|
||||
1. Fork [KnowStreaming](https://github.com/didi/KnowStreaming) 项目到自己的仓库
|
||||
|
||||
2. 从源仓库的`dev`拉取并创建自己的本地分支,例如: `dev`
|
||||
3. 在本地分支上对代码进行修改
|
||||
4. Rebase 开发分支, 并解决冲突
|
||||
5. commit 并 push 您的更改到您自己的**分叉仓库**
|
||||
6. 创建一个 Pull Request 到**源仓库**的`dev`分支中。
|
||||
7. 等待回复。如果回复的慢,请无情的催促。
|
||||
|
||||
|
||||
更为详细的贡献流程请看:[贡献流程](./docs/contributer_guide/贡献流程.md)
|
||||
|
||||
创建Pull Request时:
|
||||
|
||||
1. 请遵循 PR的 [模板](./.github/PULL_REQUEST_TEMPLATE.md)
|
||||
2. 请确保 PR 有相应的issue。
|
||||
3. 如果您的 PR 包含较大的更改,例如组件重构或新组件,请编写有关其设计和使用的详细文档(在对应的issue中)。
|
||||
4. 注意单个 PR 不能太大。如果需要进行大量更改,最好将更改分成几个单独的 PR。
|
||||
5. 在合并PR之前,尽量的将最终的提交信息清晰简洁, 将多次修改的提交尽可能的合并为一次提交。
|
||||
6. 创建 PR 后,将为PR分配一个或多个reviewers。
|
||||
|
||||
|
||||
<font color=red><b>如果您的 PR 包含较大的更改,例如组件重构或新组件,请编写有关其设计和使用的详细文档。</b></font>
|
||||
|
||||
|
||||
# 代码审查指南
|
||||
|
||||
Commiter将轮流review代码,以确保在合并前至少有一名Commiter
|
||||
|
||||
一些原则:
|
||||
|
||||
- 可读性——重要的代码应该有详细的文档。API 应该有 Javadoc。代码风格应与现有风格保持一致。
|
||||
- 优雅:新的函数、类或组件应该设计得很好。
|
||||
- 可测试性——单元测试用例应该覆盖 80% 的新代码。
|
||||
- 可维护性 - 遵守我们的编码规范。
|
||||
|
||||
|
||||
# 开发者
|
||||
|
||||
## 成为Contributor
|
||||
|
||||
只要成功提交并合并PR , 则为Contributor
|
||||
|
||||
贡献者名单请看:[贡献者名单](./docs/contributer_guide/开发者名单.md)
|
||||
|
||||
## 尝试成为Commiter
|
||||
|
||||
一般来说, 贡献8个重要的补丁并至少让三个不同的人来Review他们(您需要3个Commiter的支持)。
|
||||
然后请人给你提名, 您需要展示您的
|
||||
|
||||
1. 至少8个重要的PR和项目的相关问题
|
||||
2. 与团队合作的能力
|
||||
3. 了解项目的代码库和编码风格
|
||||
4. 编写好代码的能力
|
||||
|
||||
当前的Commiter可以通过在KnowStreaming中的Issue标签 `nomination`(提名)来提名您
|
||||
|
||||
1. 你的名字和姓氏
|
||||
2. 指向您的Git个人资料的链接
|
||||
3. 解释为什么你应该成为Commiter
|
||||
4. 详细说明提名人与您合作的3个PR以及相关问题,这些问题可以证明您的能力。
|
||||
|
||||
另外2个Commiter需要支持您的**提名**,如果5个工作日内没有人反对,您就是提交者,如果有人反对或者想要更多的信息,Commiter会讨论并通常达成共识(5个工作日内) 。
|
||||
|
||||
|
||||
# 开源奖励计划
|
||||
|
||||
|
||||
我们非常欢迎开发者们为KnowStreaming开源项目贡献一份力量,相应也将给予贡献者激励以表认可与感谢。
|
||||
|
||||
|
||||
## 参与贡献
|
||||
|
||||
1. 积极参与 Issue 的讨论,如答疑解惑、提供想法或报告无法解决的错误(Issue)
|
||||
2. 撰写和改进项目的文档(Wiki)
|
||||
3. 提交补丁优化代码(Coding)
|
||||
|
||||
|
||||
## 你将获得
|
||||
|
||||
1. 加入KnowStreaming开源项目贡献者名单并展示
|
||||
2. KnowStreaming开源贡献者证书(纸质&电子版)
|
||||
3. KnowStreaming贡献者精美大礼包(KnowStreamin/滴滴 周边)
|
||||
|
||||
|
||||
## 相关规则
|
||||
|
||||
- Contributer和Commiter都会有对应的证书和对应的礼包
|
||||
- 每季度有KnowStreaming项目团队评选出杰出贡献者,颁发相应证书。
|
||||
- 年末进行年度评选
|
||||
|
||||
贡献者名单请看:[贡献者名单](./docs/contributer_guide/开发者名单.md)
|
||||
14
HEADER
14
HEADER
@@ -1,14 +0,0 @@
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
8
NOTICE
8
NOTICE
@@ -1,8 +0,0 @@
|
||||
Apache Kafka
|
||||
Copyright 2020 The Apache Software Foundation.
|
||||
|
||||
This product includes software developed at
|
||||
The Apache Software Foundation (https://www.apache.org/).
|
||||
|
||||
This distribution has a binary dependency on jersey, which is available under the CDDL
|
||||
License. The source code of jersey can be found at https://github.com/jersey/jersey/.
|
||||
@@ -1,14 +0,0 @@
|
||||
*More detailed description of your change,
|
||||
if necessary. The PR title and PR message become
|
||||
the squashed commit message, so use a separate
|
||||
comment to ping reviewers.*
|
||||
|
||||
*Summary of testing strategy (including rationale)
|
||||
for the feature or bug fix. Unit and/or integration
|
||||
tests are expected for any behaviour change and
|
||||
system tests should be considered for larger changes.*
|
||||
|
||||
### Committer Checklist (excluded from commit message)
|
||||
- [ ] Verify design and implementation
|
||||
- [ ] Verify test coverage and CI build status
|
||||
- [ ] Verify documentation (including upgrade notes)
|
||||
259
README.md
259
README.md
@@ -1,220 +1,161 @@
|
||||
Apache Kafka
|
||||
=================
|
||||
See our [web site](https://kafka.apache.org) for details on the project.
|
||||
|
||||
You need to have [Java](http://www.oracle.com/technetwork/java/javase/downloads/index.html) installed.
|
||||
<p align="center">
|
||||
<img src="https://user-images.githubusercontent.com/71620349/185368586-aed82d30-1534-453d-86ff-ecfa9d0f35bd.png" width = "256" div align=center />
|
||||
|
||||
Java 8 should be used for building in order to support both Java 8 and Java 11 at runtime.
|
||||
</p>
|
||||
|
||||
Scala 2.12 is used by default, see below for how to use a different Scala version or all of the supported Scala versions.
|
||||
<p align="center">
|
||||
<a href="https://knowstreaming.com">产品官网</a> |
|
||||
<a href="https://github.com/didi/KnowStreaming/releases">下载地址</a> |
|
||||
<a href="https://doc.knowstreaming.com/product">文档资源</a> |
|
||||
<a href="https://demo.knowstreaming.com">体验环境</a>
|
||||
</p>
|
||||
|
||||
### Build a jar and run it ###
|
||||
./gradlew jar
|
||||
<p align="center">
|
||||
<!--最近一次提交时间-->
|
||||
<a href="https://img.shields.io/github/last-commit/didi/KnowStreaming">
|
||||
<img src="https://img.shields.io/github/last-commit/didi/KnowStreaming" alt="LastCommit">
|
||||
</a>
|
||||
|
||||
Follow instructions in https://kafka.apache.org/documentation.html#quickstart
|
||||
<!--最新版本-->
|
||||
<a href="https://github.com/didi/KnowStreaming/blob/master/LICENSE">
|
||||
<img src="https://img.shields.io/github/v/release/didi/KnowStreaming" alt="License">
|
||||
</a>
|
||||
|
||||
### Build source jar ###
|
||||
./gradlew srcJar
|
||||
<!--License信息-->
|
||||
<a href="https://github.com/didi/KnowStreaming/blob/master/LICENSE">
|
||||
<img src="https://img.shields.io/github/license/didi/KnowStreaming" alt="License">
|
||||
</a>
|
||||
|
||||
### Build aggregated javadoc ###
|
||||
./gradlew aggregatedJavadoc
|
||||
<!--Open-Issue-->
|
||||
<a href="https://github.com/didi/KnowStreaming/issues">
|
||||
<img src="https://img.shields.io/github/issues-raw/didi/KnowStreaming" alt="Issues">
|
||||
</a>
|
||||
|
||||
### Build javadoc and scaladoc ###
|
||||
./gradlew javadoc
|
||||
./gradlew javadocJar # builds a javadoc jar for each module
|
||||
./gradlew scaladoc
|
||||
./gradlew scaladocJar # builds a scaladoc jar for each module
|
||||
./gradlew docsJar # builds both (if applicable) javadoc and scaladoc jars for each module
|
||||
<!--知识星球-->
|
||||
<a href="https://z.didi.cn/5gSF9">
|
||||
<img src="https://img.shields.io/badge/join-%E7%9F%A5%E8%AF%86%E6%98%9F%E7%90%83-red" alt="Slack">
|
||||
</a>
|
||||
|
||||
### Run unit/integration tests ###
|
||||
./gradlew test # runs both unit and integration tests
|
||||
./gradlew unitTest
|
||||
./gradlew integrationTest
|
||||
|
||||
### Force re-running tests without code change ###
|
||||
./gradlew cleanTest test
|
||||
./gradlew cleanTest unitTest
|
||||
./gradlew cleanTest integrationTest
|
||||
</p>
|
||||
|
||||
### Running a particular unit/integration test ###
|
||||
./gradlew clients:test --tests RequestResponseTest
|
||||
|
||||
### Running a particular test method within a unit/integration test ###
|
||||
./gradlew core:test --tests kafka.api.ProducerFailureHandlingTest.testCannotSendToInternalTopic
|
||||
./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testMetadataUpdateWaitTime
|
||||
---
|
||||
|
||||
### Running a particular unit/integration test with log4j output ###
|
||||
Change the log4j setting in either `clients/src/test/resources/log4j.properties` or `core/src/test/resources/log4j.properties`
|
||||
|
||||
./gradlew clients:test --tests RequestResponseTest
|
||||
## `Know Streaming` 简介
|
||||
|
||||
### Generating test coverage reports ###
|
||||
Generate coverage reports for the whole project:
|
||||
`Know Streaming`是一套云原生的Kafka管控平台,脱胎于众多互联网内部多年的Kafka运营实践经验,专注于Kafka运维管控、监控告警、资源治理、多活容灾等核心场景。在用户体验、监控、运维管控上进行了平台化、可视化、智能化的建设,提供一系列特色的功能,极大地方便了用户和运维人员的日常使用,让普通运维人员都能成为Kafka专家。
|
||||
|
||||
./gradlew reportCoverage
|
||||
我们现在正在收集 Know Streaming 用户信息,以帮助我们进一步改进 Know Streaming。
|
||||
请在 [issue#663](https://github.com/didi/KnowStreaming/issues/663) 上提供您的使用信息来支持我们:[谁在使用 Know Streaming](https://github.com/didi/KnowStreaming/issues/663)
|
||||
|
||||
Generate coverage for a single module, i.e.:
|
||||
|
||||
./gradlew clients:reportCoverage
|
||||
|
||||
### Building a binary release gzipped tar ball ###
|
||||
./gradlew clean releaseTarGz
|
||||
|
||||
The above command will fail if you haven't set up the signing key. To bypass signing the artifact, you can run:
|
||||
整体具有以下特点:
|
||||
|
||||
./gradlew clean releaseTarGz -x signArchives
|
||||
- 👀 **零侵入、全覆盖**
|
||||
- 无需侵入改造 `Apache Kafka` ,一键便能纳管 `0.10.x` ~ `3.x.x` 众多版本的Kafka,包括 `ZK` 或 `Raft` 运行模式的版本,同时在兼容架构上具备良好的扩展性,帮助您提升集群管理水平;
|
||||
|
||||
The release file can be found inside `./core/build/distributions/`.
|
||||
- 🌪️ **零成本、界面化**
|
||||
- 提炼高频 CLI 能力,设计合理的产品路径,提供清新美观的 GUI 界面,支持 Cluster、Broker、Zookeeper、Topic、ConsumerGroup、Message、ACL、Connect 等组件 GUI 管理,普通用户5分钟即可上手;
|
||||
|
||||
### Cleaning the build ###
|
||||
./gradlew clean
|
||||
- 👏 **云原生、插件化**
|
||||
- 基于云原生构建,具备水平扩展能力,只需要增加节点即可获取更强的采集及对外服务能力,提供众多可热插拔的企业级特性,覆盖可观测性生态整合、资源治理、多活容灾等核心场景;
|
||||
|
||||
### Running a task with one of the Scala versions available (2.12.x or 2.13.x) ###
|
||||
*Note that if building the jars with a version other than 2.12.x, you need to set the `SCALA_VERSION` variable or change it in `bin/kafka-run-class.sh` to run the quick start.*
|
||||
- 🚀 **专业能力**
|
||||
- 集群管理:支持一键纳管,健康分析、核心组件观测 等功能;
|
||||
- 观测提升:多维度指标观测大盘、观测指标最佳实践 等功能;
|
||||
- 异常巡检:集群多维度健康巡检、集群多维度健康分 等功能;
|
||||
- 能力增强:集群负载均衡、Topic扩缩副本、Topic副本迁移 等功能;
|
||||
|
||||
You can pass either the major version (eg 2.12) or the full version (eg 2.12.7):
|
||||
|
||||
./gradlew -PscalaVersion=2.12 jar
|
||||
./gradlew -PscalaVersion=2.12 test
|
||||
./gradlew -PscalaVersion=2.12 releaseTarGz
|
||||
|
||||
**产品图**
|
||||
|
||||
### Running a task with all the scala versions enabled by default ###
|
||||
<p align="center">
|
||||
|
||||
Append `All` to the task name:
|
||||
<img src="http://img-ys011.didistatic.com/static/dc2img/do1_sPmS4SNLX9m1zlpmHaLJ" width = "768" height = "473" div align=center />
|
||||
|
||||
./gradlew testAll
|
||||
./gradlew jarAll
|
||||
./gradlew releaseTarGzAll
|
||||
</p>
|
||||
|
||||
### Running a task for a specific project ###
|
||||
This is for `core`, `examples` and `clients`
|
||||
|
||||
./gradlew core:jar
|
||||
./gradlew core:test
|
||||
|
||||
### Listing all gradle tasks ###
|
||||
./gradlew tasks
|
||||
|
||||
### Building IDE project ####
|
||||
*Note that this is not strictly necessary (IntelliJ IDEA has good built-in support for Gradle projects, for example).*
|
||||
## 文档资源
|
||||
|
||||
./gradlew eclipse
|
||||
./gradlew idea
|
||||
**`开发相关手册`**
|
||||
|
||||
The `eclipse` task has been configured to use `${project_dir}/build_eclipse` as Eclipse's build directory. Eclipse's default
|
||||
build directory (`${project_dir}/bin`) clashes with Kafka's scripts directory and we don't use Gradle's build directory
|
||||
to avoid known issues with this configuration.
|
||||
- [打包编译手册](docs/install_guide/源码编译打包手册.md)
|
||||
- [单机部署手册](docs/install_guide/单机部署手册.md)
|
||||
- [版本升级手册](docs/install_guide/版本升级手册.md)
|
||||
- [本地源码启动手册](docs/dev_guide/本地源码启动手册.md)
|
||||
- [页面无数据排查手册](docs/dev_guide/页面无数据排查手册.md)
|
||||
|
||||
### Publishing the jar for all version of Scala and for all projects to maven ###
|
||||
./gradlew uploadArchivesAll
|
||||
**`产品相关手册`**
|
||||
|
||||
Please note for this to work you should create/update `${GRADLE_USER_HOME}/gradle.properties` (typically, `~/.gradle/gradle.properties`) and assign the following variables
|
||||
- [产品使用指南](docs/user_guide/用户使用手册.md)
|
||||
- [2.x与3.x新旧对比手册](docs/user_guide/新旧对比手册.md)
|
||||
- [FAQ](docs/user_guide/faq.md)
|
||||
|
||||
mavenUrl=
|
||||
mavenUsername=
|
||||
mavenPassword=
|
||||
signing.keyId=
|
||||
signing.password=
|
||||
signing.secretKeyRingFile=
|
||||
|
||||
### Publishing the streams quickstart archetype artifact to maven ###
|
||||
For the Streams archetype project, one cannot use gradle to upload to maven; instead the `mvn deploy` command needs to be called at the quickstart folder:
|
||||
**点击 [这里](https://doc.knowstreaming.com/product),也可以从官网获取到更多文档**
|
||||
|
||||
cd streams/quickstart
|
||||
mvn deploy
|
||||
**`产品网址`**
|
||||
- [产品官网:https://knowstreaming.com](https://knowstreaming.com)
|
||||
- [体验环境:https://demo.knowstreaming.com](https://demo.knowstreaming.com),登陆账号:admin/admin
|
||||
|
||||
Please note for this to work you should create/update user maven settings (typically, `${USER_HOME}/.m2/settings.xml`) to assign the following variables
|
||||
|
||||
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||
https://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||
...
|
||||
<servers>
|
||||
...
|
||||
<server>
|
||||
<id>apache.snapshots.https</id>
|
||||
<username>${maven_username}</username>
|
||||
<password>${maven_password}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>apache.releases.https</id>
|
||||
<username>${maven_username}</username>
|
||||
<password>${maven_password}</password>
|
||||
</server>
|
||||
...
|
||||
</servers>
|
||||
...
|
||||
|
||||
## 成为社区贡献者
|
||||
|
||||
### Installing the jars to the local Maven repository ###
|
||||
./gradlew installAll
|
||||
1. [贡献源码](https://doc.knowstreaming.com/product/10-contribution) 了解如何成为 Know Streaming 的贡献者
|
||||
2. [具体贡献流程](https://doc.knowstreaming.com/product/10-contribution#102-贡献流程)
|
||||
3. [开源激励计划](https://doc.knowstreaming.com/product/10-contribution#105-开源激励计划)
|
||||
4. [贡献者名单](https://doc.knowstreaming.com/product/10-contribution#106-贡献者名单)
|
||||
|
||||
### Building the test jar ###
|
||||
./gradlew testJar
|
||||
|
||||
### Determining how transitive dependencies are added ###
|
||||
./gradlew core:dependencies --configuration runtime
|
||||
获取KnowStreaming开源社区证书。
|
||||
|
||||
### Determining if any dependencies could be updated ###
|
||||
./gradlew dependencyUpdates
|
||||
## 加入技术交流群
|
||||
|
||||
### Running code quality checks ###
|
||||
There are two code quality analysis tools that we regularly run, spotbugs and checkstyle.
|
||||
**`1、知识星球`**
|
||||
|
||||
#### Checkstyle ####
|
||||
Checkstyle enforces a consistent coding style in Kafka.
|
||||
You can run checkstyle using:
|
||||
<p align="left">
|
||||
<img src="https://user-images.githubusercontent.com/71620349/185357284-fdff1dad-c5e9-4ddf-9a82-0be1c970980d.JPG" height = "180" div align=left />
|
||||
</p>
|
||||
|
||||
./gradlew checkstyleMain checkstyleTest
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
|
||||
The checkstyle warnings will be found in `reports/checkstyle/reports/main.html` and `reports/checkstyle/reports/test.html` files in the
|
||||
subproject build directories. They are also printed to the console. The build will fail if Checkstyle fails.
|
||||
👍 我们正在组建国内最大,最权威的 **[Kafka中文社区](https://z.didi.cn/5gSF9)**
|
||||
|
||||
#### Spotbugs ####
|
||||
Spotbugs uses static analysis to look for bugs in the code.
|
||||
You can run spotbugs using:
|
||||
在这里你可以结交各大互联网的 Kafka大佬 以及 4000+ Kafka爱好者,一起实现知识共享,实时掌控最新行业资讯,期待 👏 您的加入中~ https://z.didi.cn/5gSF9
|
||||
|
||||
./gradlew spotbugsMain spotbugsTest -x test
|
||||
有问必答~! 互动有礼~!
|
||||
|
||||
The spotbugs warnings will be found in `reports/spotbugs/main.html` and `reports/spotbugs/test.html` files in the subproject build
|
||||
directories. Use -PxmlSpotBugsReport=true to generate an XML report instead of an HTML one.
|
||||
PS: 提问请尽量把问题一次性描述清楚,并告知环境信息情况~!如使用版本、操作步骤、报错/警告信息等,方便大V们快速解答~
|
||||
|
||||
### Common build options ###
|
||||
|
||||
|
||||
The following options should be set with a `-P` switch, for example `./gradlew -PmaxParallelForks=1 test`.
|
||||
**`2、微信群`**
|
||||
|
||||
* `commitId`: sets the build commit ID as .git/HEAD might not be correct if there are local commits added for build purposes.
|
||||
* `mavenUrl`: sets the URL of the maven deployment repository (`file://path/to/repo` can be used to point to a local repository).
|
||||
* `maxParallelForks`: limits the maximum number of processes for each task.
|
||||
* `showStandardStreams`: shows standard out and standard error of the test JVM(s) on the console.
|
||||
* `skipSigning`: skips signing of artifacts.
|
||||
* `testLoggingEvents`: unit test events to be logged, separated by comma. For example `./gradlew -PtestLoggingEvents=started,passed,skipped,failed test`.
|
||||
* `xmlSpotBugsReport`: enable XML reports for spotBugs. This also disables HTML reports as only one can be enabled at a time.
|
||||
微信加群:添加`PenceXie` 的微信号备注KnowStreaming加群。
|
||||
<br/>
|
||||
|
||||
### Dependency Analysis ###
|
||||
加群之前有劳点一下 star,一个小小的 star 是对KnowStreaming作者们努力建设社区的动力。
|
||||
|
||||
The gradle [dependency debugging documentation](https://docs.gradle.org/current/userguide/viewing_debugging_dependencies.html) mentions using the `dependencies` or `dependencyInsight` tasks to debug dependencies for the root project or individual subprojects.
|
||||
感谢感谢!!!
|
||||
|
||||
Alternatively, use the `allDeps` or `allDepInsight` tasks for recursively iterating through all subprojects:
|
||||
<img width="116" alt="wx" src="https://user-images.githubusercontent.com/71620349/192257217-c4ebc16c-3ad9-485d-a914-5911d3a4f46b.png">
|
||||
|
||||
./gradlew allDeps
|
||||
## Star History
|
||||
|
||||
./gradlew allDepInsight --configuration runtime --dependency com.fasterxml.jackson.core:jackson-databind
|
||||
[](https://star-history.com/#didi/KnowStreaming&Date)
|
||||
|
||||
These take the same arguments as the builtin variants.
|
||||
|
||||
### Running system tests ###
|
||||
|
||||
See [tests/README.md](tests/README.md).
|
||||
|
||||
### Running in Vagrant ###
|
||||
|
||||
See [vagrant/README.md](vagrant/README.md).
|
||||
|
||||
### Contribution ###
|
||||
|
||||
Apache Kafka is interested in building the community; we would welcome any thoughts or [patches](https://issues.apache.org/jira/browse/KAFKA). You can reach us [on the Apache mailing lists](http://kafka.apache.org/contact.html).
|
||||
|
||||
To contribute follow the instructions here:
|
||||
* https://kafka.apache.org/contributing.html
|
||||
|
||||
646
Releases_Notes.md
Normal file
646
Releases_Notes.md
Normal file
@@ -0,0 +1,646 @@
|
||||
|
||||
## v3.4.0
|
||||
|
||||
|
||||
|
||||
**问题修复**
|
||||
- [Bugfix]修复 Overview 指标文案错误的问题 ([#1190](https://github.com/didi/KnowStreaming/issues/1190))
|
||||
- [Bugfix]修复删除 Kafka 集群后,Connect 集群任务出现 NPE 问题 ([#1129](https://github.com/didi/KnowStreaming/issues/1129))
|
||||
- [Bugfix]修复在 Ldap 登录时,设置 auth-user-registration: false 会导致空指针的问题 ([#1117](https://github.com/didi/KnowStreaming/issues/1117))
|
||||
- [Bugfix]修复 Ldap 登录,调用 user.getId() 出现 NPE 的问题 ([#1108](https://github.com/didi/KnowStreaming/issues/1108))
|
||||
- [Bugfix]修复前端新增角色失败等问题 ([#1107](https://github.com/didi/KnowStreaming/issues/1107))
|
||||
- [Bugfix]修复 ZK 四字命令解析错误的问题
|
||||
- [Bugfix]修复 zk standalone 模式下,状态获取错误的问题
|
||||
- [Bugfix]修复 Broker 元信息解析方法未调用导致接入集群失败的问题 ([#993](https://github.com/didi/KnowStreaming/issues/993))
|
||||
- [Bugfix]修复 ConsumerAssignment 类型转换错误的问题
|
||||
- [Bugfix]修复对 Connect 集群的 clusterUrl 的动态更新导致配置不生效的问题 ([#1079](https://github.com/didi/KnowStreaming/issues/1079))
|
||||
- [Bugfix]修复消费组不支持重置到最旧 Offset 的问题 ([#1059](https://github.com/didi/KnowStreaming/issues/1059))
|
||||
- [Bugfix]后端增加查看 User 密码的权限点 ([#1095](https://github.com/didi/KnowStreaming/issues/1095))
|
||||
- [Bugfix]修复 Connect-JMX 端口维护信息错误的问题 ([#1146](https://github.com/didi/KnowStreaming/issues/1146))
|
||||
- [Bugfix]修复系统管理子应用无法正常启动的问题 ([#1167](https://github.com/didi/KnowStreaming/issues/1167))
|
||||
- [Bugfix]修复 Security 模块,权限点缺失问题 ([#1069](https://github.com/didi/KnowStreaming/issues/1069)), ([#1154](https://github.com/didi/KnowStreaming/issues/1154))
|
||||
- [Bugfix]修复 Connect-Worker Jmx 不生效的问题 ([#1067](https://github.com/didi/KnowStreaming/issues/1067))
|
||||
- [Bugfix]修复权限 ACL 管理中,消费组列表展示错误的问题 ([#1037](https://github.com/didi/KnowStreaming/issues/1037))
|
||||
- [Bugfix]修复 Connect 模块没有默认勾选指标的问题([#1022](https://github.com/didi/KnowStreaming/issues/1022))
|
||||
- [Bugfix]修复 es 索引 create/delete 死循环的问题 ([#1021](https://github.com/didi/KnowStreaming/issues/1021))
|
||||
- [Bugfix]修复 Connect-GroupDescription 解析失败的问题 ([#1015](https://github.com/didi/KnowStreaming/issues/1015))
|
||||
- [Bugfix]修复 Prometheus 开放接口中,Partition 指标 tag 缺失的问题 ([#1014](https://github.com/didi/KnowStreaming/issues/1014))
|
||||
- [Bugfix]修复 Topic 消息展示,offset 为 0 不显示的问题 ([#1192](https://github.com/didi/KnowStreaming/issues/1192))
|
||||
- [Bugfix]修复重置offset接口调用过多问题
|
||||
- [Bugfix]Connect 提交任务变更为只保存用户修改的配置,并修复 JSON 模式下配置展示不全的问题 ([#1158](https://github.com/didi/KnowStreaming/issues/1158))
|
||||
- [Bugfix]修复消费组 Offset 重置后,提示重置成功,但是前端不刷新数据,Offset 无变化的问题 ([#1090](https://github.com/didi/KnowStreaming/issues/1090))
|
||||
- [Bugfix]修复未勾选系统管理查看权限,但是依然可以查看系统管理的问题 ([#1105](https://github.com/didi/KnowStreaming/issues/1105))
|
||||
|
||||
|
||||
**产品优化**
|
||||
- [Optimize]补充接入集群时,可选的 Kafka 版本列表 ([#1204](https://github.com/didi/KnowStreaming/issues/1204))
|
||||
- [Optimize]GroupTopic 信息修改为实时获取 ([#1196](https://github.com/didi/KnowStreaming/issues/1196))
|
||||
- [Optimize]增加 AdminClient 观测信息 ([#1111](https://github.com/didi/KnowStreaming/issues/1111))
|
||||
- [Optimize]增加 Connector 运行状态指标 ([#1110](https://github.com/didi/KnowStreaming/issues/1110))
|
||||
- [Optimize]统一 DB 元信息更新格式 ([#1127](https://github.com/didi/KnowStreaming/issues/1127)), ([#1125](https://github.com/didi/KnowStreaming/issues/1125)), ([#1006](https://github.com/didi/KnowStreaming/issues/1006))
|
||||
- [Optimize]日志输出增加支持 MDC,方便用户在 logback.xml 中 json 格式化日志 ([#1032](https://github.com/didi/KnowStreaming/issues/1032))
|
||||
- [Optimize]Jmx 相关日志优化 ([#1082](https://github.com/didi/KnowStreaming/issues/1082))
|
||||
- [Optimize]Topic-Partitions增加主动超时功能 ([#1076](https://github.com/didi/KnowStreaming/issues/1076))
|
||||
- [Optimize]Topic-Messages页面后端增加按照Partition和Offset纬度的排序 ([#1075](https://github.com/didi/KnowStreaming/issues/1075))
|
||||
- [Optimize]Connect-JSON模式下的JSON格式和官方API的格式不一致 ([#1080](https://github.com/didi/KnowStreaming/issues/1080)), ([#1153](https://github.com/didi/KnowStreaming/issues/1153)), ([#1192](https://github.com/didi/KnowStreaming/issues/1192))
|
||||
- [Optimize]登录页面展示的 star 数量修改为最新的数量
|
||||
- [Optimize]Group 列表的 maxLag 指标调整为实时获取 ([#1074](https://github.com/didi/KnowStreaming/issues/1074))
|
||||
- [Optimize]Connector增加重启、编辑、删除等权限点 ([#1066](https://github.com/didi/KnowStreaming/issues/1066)), ([#1147](https://github.com/didi/KnowStreaming/issues/1147))
|
||||
- [Optimize]优化 pom.xml 中,KS版本的标签名
|
||||
- [Optimize]优化集群Brokers中, Controller显示存在延迟的问题 ([#1162](https://github.com/didi/KnowStreaming/issues/1162))
|
||||
- [Optimize]bump jackson version to 2.13.5
|
||||
- [Optimize]权限新增 ACL,自定义权限配置,资源 TransactionalId 优化 ([#1192](https://github.com/didi/KnowStreaming/issues/1192))
|
||||
- [Optimize]Connect 样式优化
|
||||
- [Optimize]消费组详情控制数据实时刷新
|
||||
|
||||
|
||||
**功能新增**
|
||||
- [Feature]新增删除 Group 或 GroupOffset 功能 ([#1064](https://github.com/didi/KnowStreaming/issues/1064)), ([#1084](https://github.com/didi/KnowStreaming/issues/1084)), ([#1040](https://github.com/didi/KnowStreaming/issues/1040)), ([#1144](https://github.com/didi/KnowStreaming/issues/1144))
|
||||
- [Feature]增加 Truncate 数据功能 ([#1062](https://github.com/didi/KnowStreaming/issues/1062)), ([#1043](https://github.com/didi/KnowStreaming/issues/1043)), ([#1145](https://github.com/didi/KnowStreaming/issues/1145))
|
||||
- [Feature]支持指定 Server 的具体 Jmx 端口 ([#965](https://github.com/didi/KnowStreaming/issues/965))
|
||||
|
||||
|
||||
**文档更新**
|
||||
- [Doc]FAQ 补充 ES 8.x 版本使用说明 ([#1189](https://github.com/didi/KnowStreaming/issues/1189))
|
||||
- [Doc]补充启动失败的说明 ([#1126](https://github.com/didi/KnowStreaming/issues/1126))
|
||||
- [Doc]补充 ZK 无数据排查说明 ([#1004](https://github.com/didi/KnowStreaming/issues/1004))
|
||||
- [Doc]无数据排查文档,补充 ES 集群 Shard 满的异常日志
|
||||
- [Doc]README 补充页面无数据排查手册链接
|
||||
- [Doc]补充连接特定 Jmx 端口的说明 ([#965](https://github.com/didi/KnowStreaming/issues/965))
|
||||
- [Doc]补充 zk_properties 字段的使用说明 ([#1003](https://github.com/didi/KnowStreaming/issues/1003))
|
||||
|
||||
|
||||
---
|
||||
|
||||
|
||||
## v3.3.0
|
||||
|
||||
**问题修复**
|
||||
- 修复 Connect 的 JMX-Port 配置未生效问题;
|
||||
- 修复 不存在 Connector 时,OverView 页面的数据一直处于加载中的问题;
|
||||
- 修复 Group 分区信息,分页时展示不全的问题;
|
||||
- 修复采集副本指标时,参数传递错误的问题;
|
||||
- 修复用户信息修改后,用户列表会抛出空指针异常的问题;
|
||||
- 修复 Topic 详情页面,查看消息时,选择分区不生效问题;
|
||||
- 修复对 ZK 客户端进行配置后不生效的问题;
|
||||
- 修复 connect 模块,指标中缺少健康巡检项通过数的问题;
|
||||
- 修复 connect 模块,指标获取方法存在映射错误的问题;
|
||||
- 修复 connect 模块,max 纬度指标获取错误的问题;
|
||||
- 修复 Topic 指标大盘 TopN 指标显示信息错误的问题;
|
||||
- 修复 Broker Similar Config 显示错误的问题;
|
||||
- 修复解析 ZK 四字命令时,数据类型设置错误导致空指针的问题;
|
||||
- 修复新增 Topic 时,清理策略选项版本控制错误的问题;
|
||||
- 修复新接入集群时 Controller-Host 信息不显示的问题;
|
||||
- 修复 Connector 和 MM2 列表搜索不生效的问题;
|
||||
- 修复 Zookeeper 页面,Leader 显示存在异常的问题;
|
||||
- 修复前端打包失败的问题;
|
||||
|
||||
|
||||
**产品优化**
|
||||
- ZK Overview 页面补充默认展示的指标;
|
||||
- 统一初始化 ES 索引模版的脚本为 init_es_template.sh,同时新增缺失的 connect 索引模版初始化脚本,去除多余的 replica 和 zookeper 索引模版初始化脚本;
|
||||
- 指标大盘页面,优化指标筛选操作后,无指标数据的指标卡片由不显示改为显示,并增加无数据的兜底;
|
||||
- 删除从 ES 读写 replica 指标的相关代码;
|
||||
- 优化 Topic 健康巡检的日志,明确错误的原因;
|
||||
- 优化无 ZK 模块时,巡检详情忽略对 ZK 的展示;
|
||||
- 优化本地缓存大小为可配置;
|
||||
- Task 模块中的返回中,补充任务的分组信息;
|
||||
- FAQ 补充 Ldap 的配置说明;
|
||||
- FAQ 补充接入 Kerberos 认证的 Kafka 集群的配置说明;
|
||||
- ks_km_kafka_change_record 表增加时间纬度的索引,优化查询性能;
|
||||
- 优化 ZK 健康巡检的日志,便于问题的排查;
|
||||
|
||||
**功能新增**
|
||||
- 新增基于滴滴 Kafka 的 Topic 复制功能(需使用滴滴 Kafka 才可具备该能力);
|
||||
- Topic 指标大盘,新增 Topic 复制相关的指标;
|
||||
- 新增基于 TestContainers 的单测;
|
||||
|
||||
|
||||
**Kafka MM2 Beta版 (v3.3.0版本新增发布)**
|
||||
- MM2 任务的增删改查;
|
||||
- MM2 任务的指标大盘;
|
||||
- MM2 任务的健康状态;
|
||||
|
||||
---
|
||||
|
||||
|
||||
## v3.2.0
|
||||
|
||||
**问题修复**
|
||||
- 修复健康巡检结果更新至 DB 时,出现死锁问题;
|
||||
- 修复 KafkaJMXClient 类中,logger错误的问题;
|
||||
- 后端修复 Topic 过期策略在 0.10.1.0 版本能多选的问题,实际应该只能二选一;
|
||||
- 修复接入集群时,不填写集群配置会报错的问题;
|
||||
- 升级 spring-context 至 5.3.19 版本,修复安全漏洞;
|
||||
- 修复 Broker & Topic 修改配置时,多版本兼容配置的版本信息错误的问题;
|
||||
- 修复 Topic 列表的健康分为健康状态;
|
||||
- 修复 Broker LogSize 指标存储名称错误导致查询不到的问题;
|
||||
- 修复 Prometheus 中,缺少 Group 部分指标的问题;
|
||||
- 修复因缺少健康状态指标导致集群数错误的问题;
|
||||
- 修复后台任务记录操作日志时,因缺少操作用户信息导致出现异常的问题;
|
||||
- 修复 Replica 指标查询时,DSL 错误的问题;
|
||||
- 关闭 errorLogger,修复错误日志重复输出的问题;
|
||||
- 修复系统管理更新用户信息失败的问题;
|
||||
- 修复因原AR信息丢失,导致迁移任务一直处于执行中的错误;
|
||||
- 修复集群 Topic 列表实时数据查询时,出现失败的问题;
|
||||
- 修复集群 Topic 列表,页面白屏问题;
|
||||
- 修复副本变更时,因AR数据异常,导致数组访问越界的问题;
|
||||
|
||||
|
||||
**产品优化**
|
||||
- 优化健康巡检为按照资源维度多线程并发处理;
|
||||
- 统一日志输出格式,并优化部分输出的日志;
|
||||
- 优化 ZK 四字命令结果解析过程中,容易引起误解的 WARN 日志;
|
||||
- 优化 Zookeeper 详情中,目录结构的搜索文案;
|
||||
- 优化线程池的名称,方便第三方系统进行相关问题的分析;
|
||||
- 去除 ESClient 的并发访问控制,降低 ESClient 创建数及提升利用率;
|
||||
- 优化 Topic Messages 抽屉文案;
|
||||
- 优化 ZK 健康巡检失败时的错误日志信息;
|
||||
- 提高 Offset 信息获取的超时时间,降低并发过高时出现请求超时的概率;
|
||||
- 优化 Topic & Partition 元信息的更新策略,降低对 DB 连接的占用;
|
||||
- 优化 Sonar 代码扫码问题;
|
||||
- 优化分区 Offset 指标的采集;
|
||||
- 优化前端图表相关组件逻辑;
|
||||
- 优化产品主题色;
|
||||
- Consumer 列表刷新按钮新增 hover 提示;
|
||||
- 优化配置 Topic 的消息大小时的测试弹框体验;
|
||||
- 优化 Overview 页面 TopN 查询的流程;
|
||||
|
||||
|
||||
**功能新增**
|
||||
- 新增页面无数据排查文档;
|
||||
- 增加 ES 索引删除的功能;
|
||||
- 支持拆分API服务和Job服务部署;
|
||||
|
||||
|
||||
**Kafka Connect Beta版 (v3.2.0版本新增发布)**
|
||||
- Connect 集群的纳管;
|
||||
- Connector 的增删改查;
|
||||
- Connect 集群 & Connector 的指标大盘;
|
||||
|
||||
|
||||
---
|
||||
|
||||
|
||||
## v3.1.0
|
||||
|
||||
**Bug修复**
|
||||
- 修复重置 Group Offset 的提示信息中,缺少Dead状态也可进行重置的描述;
|
||||
- 修复新建 Topic 后,立即查看 Topic Messages 信息时,会提示 Topic 不存在的问题;
|
||||
- 修复副本变更时,优先副本选举未被正常触发执行的问题;
|
||||
- 修复 git 目录不存在时,打包不能正常进行的问题;
|
||||
- 修复 KRaft 模式的 Kafka 集群,JMX PORT 显示 -1 的问题;
|
||||
|
||||
|
||||
**体验优化**
|
||||
- 优化Cluster、Broker、Topic、Group的健康分为健康状态;
|
||||
- 去除健康巡检配置中的权重信息;
|
||||
- 错误提示页面展示优化;
|
||||
- 前端打包编译依赖默认使用 taobao 镜像;
|
||||
- 重新设计优化导航栏的 icon ;
|
||||
|
||||
|
||||
**新增**
|
||||
- 个人头像下拉信息中,新增产品版本信息;
|
||||
- 多集群列表页面,新增集群健康状态分布信息;
|
||||
|
||||
|
||||
**Kafka ZK 部分 (v3.1.0版本正式发布)**
|
||||
- 新增 ZK 集群的指标大盘信息;
|
||||
- 新增 ZK 集群的服务状态概览信息;
|
||||
- 新增 ZK 集群的服务节点列表信息;
|
||||
- 新增 Kafka 在 ZK 的存储数据查看功能;
|
||||
- 新增 ZK 的健康巡检及健康状态计算;
|
||||
|
||||
|
||||
|
||||
---
|
||||
|
||||
|
||||
## v3.0.1
|
||||
|
||||
**Bug修复**
|
||||
- 修复重置 Group Offset 时,提示信息中缺少 Dead 状态也可进行重置的信息;
|
||||
- 修复 Ldap 某个属性不存在时,会直接抛出空指针导致登陆失败的问题;
|
||||
- 修复集群 Topic 列表页,健康分详情信息中,检查时间展示错误的问题;
|
||||
- 修复更新健康检查结果时,出现死锁的问题;
|
||||
- 修复 Replica 索引模版错误的问题;
|
||||
- 修复 FAQ 文档中的错误链接;
|
||||
- 修复 Broker 的 TopN 指标不存在时,页面数据不展示的问题;
|
||||
- 修复 Group 详情页,图表时间范围选择不生效的问题;
|
||||
|
||||
|
||||
**体验优化**
|
||||
- 集群 Group 列表按照 Group 维度进行展示;
|
||||
- 优化避免因 ES 中该指标不存在,导致日志中出现大量空指针的问题;
|
||||
- 优化全局 Message & Notification 展示效果;
|
||||
- 优化 Topic 扩分区名称 & 描述展示;
|
||||
|
||||
|
||||
**新增**
|
||||
- Broker 列表页面,新增 JMX 是否成功连接的信息;
|
||||
|
||||
|
||||
**ZK 部分(未完全发布)**
|
||||
- 后端补充 Kafka ZK 指标采集,Kafka ZK 信息获取相关功能;
|
||||
- 增加本地缓存,避免同一采集周期内 ZK 指标重复采集;
|
||||
- 增加 ZK 节点采集失败跳过策略,避免对存在问题的节点不断尝试;
|
||||
- 修复 zkAvgLatency 指标转 Long 时抛出异常问题;
|
||||
- 修复 ks_km_zookeeper 表中,role 字段类型错误问题;
|
||||
|
||||
---
|
||||
|
||||
## v3.0.0
|
||||
|
||||
**Bug修复**
|
||||
- 修复 Group 指标防重复采集不生效问题
|
||||
- 修复自动创建 ES 索引模版失败问题
|
||||
- 修复 Group+Topic 列表中存在已删除Topic的问题
|
||||
- 修复使用 MySQL-8 ,因兼容问题, start_time 信息为 NULL 时,会导致创建任务失败的问题
|
||||
- 修复 Group 信息表更新时,出现死锁的问题
|
||||
- 修复图表补点逻辑与图表时间范围不适配的问题
|
||||
|
||||
|
||||
**体验优化**
|
||||
- 按照资源类别,拆分健康巡检任务
|
||||
- 优化 Group 详情页的指标为实时获取
|
||||
- 图表拖拽排序支持用户级存储
|
||||
- 多集群列表 ZK 信息展示兼容无 ZK 情况
|
||||
- Topic 详情消息预览支持复制功能
|
||||
- 部分内容大数字支持千位分割符展示
|
||||
|
||||
|
||||
**新增**
|
||||
- 集群信息中,新增 Zookeeper 客户端配置字段
|
||||
- 集群信息中,新增 Kafka 集群运行模式字段
|
||||
- 新增 docker-compose 的部署方式
|
||||
|
||||
---
|
||||
|
||||
## v3.0.0-beta.3
|
||||
|
||||
**文档**
|
||||
- FAQ 补充权限识别失败问题的说明
|
||||
- 同步更新文档,保持与官网一致
|
||||
|
||||
|
||||
**Bug修复**
|
||||
- Offset 信息获取时,过滤掉无 Leader 的分区
|
||||
- 升级 oshi-core 版本至 5.6.1 版本,修复 Windows 系统获取系统指标失败问题
|
||||
- 修复 JMX 连接被关闭后,未进行重建的问题
|
||||
- 修复因 DB 中 Broker 信息不存在导致 TotalLogSize 指标获取时抛空指针问题
|
||||
- 修复 dml-logi.sql 中,SQL 注释错误的问题
|
||||
- 修复 startup.sh 中,识别操作系统类型错误的问题
|
||||
- 修复配置管理页面删除配置失败的问题
|
||||
- 修复系统管理应用文件引用路径
|
||||
- 修复 Topic Messages 详情提示信息点击跳转 404 的问题
|
||||
- 修复扩副本时,当前副本数不显示问题
|
||||
|
||||
|
||||
**体验优化**
|
||||
- Topic-Messages 页面,增加返回数据的排序以及按照Earliest/Latest的获取方式
|
||||
- 优化 GroupOffsetResetEnum 类名为 OffsetTypeEnum,使得类名含义更准确
|
||||
- 移动 KafkaZKDAO 类,及 Kafka Znode 实体类的位置,使得 Kafka Zookeeper DAO 更加内聚及便于识别
|
||||
- 后端补充 Overview 页面指标排序的功能
|
||||
- 前端 Webpack 配置优化
|
||||
- Cluster Overview 图表取消放大展示功能
|
||||
- 列表页增加手动刷新功能
|
||||
- 接入/编辑集群,优化 JMX-PORT,Version 信息的回显,优化JMX信息的展示
|
||||
- 提高登录页面图片展示清晰度
|
||||
- 部分样式和文案优化
|
||||
|
||||
---
|
||||
|
||||
## v3.0.0-beta.2
|
||||
|
||||
**文档**
|
||||
- 新增登录系统对接文档
|
||||
- 优化前端工程打包构建部分文档说明
|
||||
- FAQ补充KnowStreaming连接特定JMX IP的说明
|
||||
|
||||
|
||||
**Bug修复**
|
||||
- 修复logi_security_oplog表字段过短,导致删除Topic等操作无法记录的问题
|
||||
- 修复ES查询时,抛java.lang.NumberFormatException: For input string: "{"value":0,"relation":"eq"}" 问题
|
||||
- 修复LogStartOffset和LogEndOffset指标单位错误问题
|
||||
- 修复进行副本变更时,旧副本数为NULL的问题
|
||||
- 修复集群Group列表,在第二页搜索时,搜索时返回的分页信息错误问题
|
||||
- 修复重置Offset时,返回的错误信息提示不一致的问题
|
||||
- 修复集群查看,系统查看,LoadRebalance等页面权限点缺失问题
|
||||
- 修复查询不存在的Topic时,错误信息提示不明显的问题
|
||||
- 修复Windows用户打包前端工程报错的问题
|
||||
- package-lock.json锁定前端依赖版本号,修复因依赖自动升级导致打包失败等问题
|
||||
- 系统管理子应用,补充后端返回的Code码拦截,解决后端接口返回报错不展示的问题
|
||||
- 修复用户登出后,依旧可以访问系统的问题
|
||||
- 修复巡检任务配置时,数值显示错误的问题
|
||||
- 修复Broker/Topic Overview 图表和图表详情问题
|
||||
- 修复Job扩缩副本任务明细数据错误的问题
|
||||
- 修复重置Offset时,分区ID,Offset数值无限制问题
|
||||
- 修复扩缩/迁移副本时,无法选中Kafka系统Topic的问题
|
||||
- 修复Topic的Config页面,编辑表单时不能正确回显当前值的问题
|
||||
- 修复Broker Card返回数据后依旧展示加载态的问题
|
||||
|
||||
|
||||
|
||||
**体验优化**
|
||||
- 优化默认用户密码为 admin/admin
|
||||
- 缩短新增集群后,集群信息加载的耗时
|
||||
- 集群Broker列表,增加Controller角色信息
|
||||
- 副本变更任务结束后,增加进行优先副本选举的操作
|
||||
- Task模块任务分为Metrics、Common、Metadata三类任务,每类任务配备独立线程池,减少对Job模块线程池的占用,以及不同类任务之间的相互影响
|
||||
- 删除代码中存在的多余无用文件
|
||||
- 自动新增ES索引模版及近7天索引,减少用户搭建时需要做的事项
|
||||
- 优化前端工程打包流程
|
||||
- 优化登录页文案,页面左侧栏内容,单集群详情样式,Topic列表趋势图等
|
||||
- 首次进入Broker/Topic图表详情时,进行预缓存数据从而优化体验
|
||||
- 优化Topic详情Partition Tab的展示
|
||||
- 多集群列表页增加编辑功能
|
||||
- 优化副本变更时,迁移时间支持分钟级别粒度
|
||||
- logi-security版本升级至2.10.13
|
||||
- logi-elasticsearch-client版本升级至1.0.24
|
||||
|
||||
|
||||
**能力提升**
|
||||
- 支持Ldap登录认证
|
||||
|
||||
---
|
||||
|
||||
## v3.0.0-beta.1
|
||||
|
||||
**文档**
|
||||
- 新增Task模块说明文档
|
||||
- FAQ补充 `Specified key was too long; max key length is 767 bytes ` 错误说明
|
||||
- FAQ补充 `出现ESIndexNotFoundException报错` 错误说明
|
||||
|
||||
|
||||
**Bug修复**
|
||||
- 修复 Consumer 点击 Stop 未停止检索的问题
|
||||
- 修复创建/编辑角色权限报错问题
|
||||
- 修复多集群管理/单集群详情均衡卡片状态错误问题
|
||||
- 修复版本列表未排序问题
|
||||
- 修复Raft集群Controller信息不断记录问题
|
||||
- 修复部分版本消费组描述信息获取失败问题
|
||||
- 修复分区Offset获取失败的日志中,缺少Topic名称信息问题
|
||||
- 修复GitHub图地址错误,及图裂问题
|
||||
- 修复Broker默认使用的地址和注释不一致问题
|
||||
- 修复 Consumer 列表分页不生效问题
|
||||
- 修复操作记录表operation_methods字段缺少默认值问题
|
||||
- 修复集群均衡表中move_broker_list字段无效的问题
|
||||
- 修复KafkaUser、KafkaACL信息获取时,日志一直重复提示不支持问题
|
||||
- 修复指标缺失时,曲线出现掉底的问题
|
||||
|
||||
|
||||
**体验优化**
|
||||
- 优化前端构建时间和打包体积,增加依赖打包的分包策略
|
||||
- 优化产品样式和文案展示
|
||||
- 优化ES客户端数为可配置
|
||||
- 优化日志中大量出现的MySQL Key冲突日志
|
||||
|
||||
|
||||
**能力提升**
|
||||
- 增加周期任务,用于主动创建缺少的ES模版及索引的能力,减少额外的脚本操作
|
||||
- 增加JMX连接的Broker地址可选择的能力
|
||||
|
||||
---
|
||||
|
||||
## v3.0.0-beta.0
|
||||
|
||||
**1、多集群管理**
|
||||
|
||||
- 增加健康监测体系、关键组件&指标 GUI 展示
|
||||
- 增加 2.8.x 以上 Kafka 集群接入,覆盖 0.10.x-3.x
|
||||
- 删除逻辑集群、共享集群、Region 概念
|
||||
|
||||
**2、Cluster 管理**
|
||||
|
||||
- 增加集群概览信息、集群配置变更记录
|
||||
- 增加 Cluster 健康分,健康检查规则支持自定义配置
|
||||
- 增加 Cluster 关键指标统计和 GUI 展示,支持自定义配置
|
||||
- 增加 Cluster 层 I/O、Disk 的 Load Reblance 功能,支持定时均衡任务(企业版)
|
||||
- 删除限流、鉴权功能
|
||||
- 删除 APPID 概念
|
||||
|
||||
**3、Broker 管理**
|
||||
|
||||
- 增加 Broker 健康分
|
||||
- 增加 Broker 关键指标统计和 GUI 展示,支持自定义配置
|
||||
- 增加 Broker 参数配置功能,需重启生效
|
||||
- 增加 Controller 变更记录
|
||||
- 增加 Broker Datalogs 记录
|
||||
- 删除 Leader Rebalance 功能
|
||||
- 删除 Broker 优先副本选举
|
||||
|
||||
**4、Topic 管理**
|
||||
|
||||
- 增加 Topic 健康分
|
||||
- 增加 Topic 关键指标统计和 GUI 展示,支持自定义配置
|
||||
- 增加 Topic 参数配置功能,可实时生效
|
||||
- 增加 Topic 批量迁移、Topic 批量扩缩副本功能
|
||||
- 增加查看系统 Topic 功能
|
||||
- 优化 Partition 分布的 GUI 展示
|
||||
- 优化 Topic Message 数据采样
|
||||
- 删除 Topic 过期概念
|
||||
- 删除 Topic 申请配额功能
|
||||
|
||||
**5、Consumer 管理**
|
||||
|
||||
- 优化了 ConsumerGroup 展示形式,增加 Consumer Lag 的 GUI 展示
|
||||
|
||||
**6、ACL 管理**
|
||||
|
||||
- 增加原生 ACL GUI 配置功能,可配置生产、消费、自定义多种组合权限
|
||||
- 增加 KafkaUser 功能,可自定义新增 KafkaUser
|
||||
|
||||
**7、消息测试(企业版)**
|
||||
|
||||
- 增加生产者消息模拟器,支持 Data、Flow、Header、Options 自定义配置(企业版)
|
||||
- 增加消费者消息模拟器,支持 Data、Flow、Header、Options 自定义配置(企业版)
|
||||
|
||||
**8、Job**
|
||||
|
||||
- 优化 Job 模块,支持任务进度管理
|
||||
|
||||
**9、系统管理**
|
||||
|
||||
- 优化用户、角色管理体系,支持自定义角色配置页面及操作权限
|
||||
- 优化审计日志信息
|
||||
- 删除多租户体系
|
||||
- 删除工单流程
|
||||
|
||||
---
|
||||
|
||||
## v2.6.0
|
||||
|
||||
版本上线时间:2022-01-24
|
||||
|
||||
### 能力提升
|
||||
- 增加简单回退工具类
|
||||
|
||||
### 体验优化
|
||||
- 补充周期任务说明文档
|
||||
- 补充集群安装部署使用说明文档
|
||||
- 升级Swagger、SpringFramework、SpringBoot、ECharts版本
|
||||
- 优化Task模块的日志输出
|
||||
- 优化corn表达式解析失败后退出无任何日志提示问题
|
||||
- Ldap用户接入时,增加部门及邮箱信息等
|
||||
- 对Jmx模块,增加连接失败后的回退机制及错误日志优化
|
||||
- 增加线程池、客户端池可配置
|
||||
- 删除无用的jmx_prometheus_javaagent-0.14.0.jar
|
||||
- 优化迁移任务名称
|
||||
- 优化创建Region时,Region容量信息不能立即被更新问题
|
||||
- 引入lombok
|
||||
- 更新视频教程
|
||||
- 优化kcm_script.sh脚本中的LogiKM地址为可通过程序传入
|
||||
- 第三方接口及网关接口,增加是否跳过登录的开关
|
||||
- extends模块相关配置调整为非必须在application.yml中配置
|
||||
|
||||
### bug修复
|
||||
- 修复批量往DB写入空指标数组时报SQL语法异常的问题
|
||||
- 修复网关增加配置及修改配置时,version不变化问题
|
||||
- 修复集群列表页,提示框遮挡问题
|
||||
- 修复对高版本Broker元信息协议解析失败的问题
|
||||
- 修复Dockerfile执行时提示缺少application.yml文件的问题
|
||||
- 修复逻辑集群更新时,会报空指针的问题
|
||||
|
||||
|
||||
## v2.5.0
|
||||
|
||||
版本上线时间:2021-07-10
|
||||
|
||||
### 体验优化
|
||||
- 更改产品名为LogiKM
|
||||
- 更新产品图标
|
||||
|
||||
|
||||
## v2.4.1+
|
||||
|
||||
版本上线时间:2021-05-21
|
||||
|
||||
### 能力提升
|
||||
- 增加直接增加权限和配额的接口(v2.4.1)
|
||||
- 增加接口调用可绕过登录的功能(v2.4.1)
|
||||
|
||||
### 体验优化
|
||||
- Tomcat 版本提升至8.5.66(v2.4.2)
|
||||
- op接口优化,拆分util接口为topic、leader两类接口(v2.4.1)
|
||||
- 简化Gateway配置的Key长度(v2.4.1)
|
||||
|
||||
### bug修复
|
||||
- 修复页面展示版本错误问题(v2.4.2)
|
||||
|
||||
|
||||
## v2.4.0
|
||||
|
||||
版本上线时间:2021-05-18
|
||||
|
||||
|
||||
### 能力提升
|
||||
|
||||
- 增加App与Topic自动化审批开关
|
||||
- Broker元信息中增加Rack信息
|
||||
- 升级MySQL 驱动,支持MySQL 8+
|
||||
- 增加操作记录查询界面
|
||||
|
||||
### 体验优化
|
||||
|
||||
- FAQ告警组说明优化
|
||||
- 用户手册共享及 独享集群概念优化
|
||||
- 用户管理界面,前端限制用户删除自己
|
||||
|
||||
### bug修复
|
||||
|
||||
- 修复op-util类中创建Topic失败的接口
|
||||
- 周期同步Topic到DB的任务修复,将Topic列表查询从缓存调整为直接查DB
|
||||
- 应用下线审批失败的功能修复,将权限为0(无权限)的数据进行过滤
|
||||
- 修复登录及权限绕过的漏洞
|
||||
- 修复研发角色展示接入集群、暂停监控等按钮的问题
|
||||
|
||||
|
||||
## v2.3.0
|
||||
|
||||
版本上线时间:2021-02-08
|
||||
|
||||
|
||||
### 能力提升
|
||||
|
||||
- 新增支持docker化部署
|
||||
- 可指定Broker作为候选controller
|
||||
- 可新增并管理网关配置
|
||||
- 可获取消费组状态
|
||||
- 增加集群的JMX认证
|
||||
|
||||
### 体验优化
|
||||
|
||||
- 优化编辑用户角色、修改密码的流程
|
||||
- 新增consumerID的搜索功能
|
||||
- 优化“Topic连接信息”、“消费组重置消费偏移”、“修改Topic保存时间”的文案提示
|
||||
- 在相应位置增加《资源申请文档》链接
|
||||
|
||||
### bug修复
|
||||
|
||||
- 修复Broker监控图表时间轴展示错误的问题
|
||||
- 修复创建夜莺监控告警规则时,使用的告警周期的单位不正确的问题
|
||||
|
||||
|
||||
|
||||
## v2.2.0
|
||||
|
||||
版本上线时间:2021-01-25
|
||||
|
||||
|
||||
|
||||
### 能力提升
|
||||
|
||||
- 优化工单批量操作流程
|
||||
- 增加获取Topic75分位/99分位的实时耗时数据
|
||||
- 增加定时任务,可将无主未落DB的Topic定期写入DB
|
||||
|
||||
### 体验优化
|
||||
|
||||
- 在相应位置增加《集群接入文档》链接
|
||||
- 优化物理集群、逻辑集群含义
|
||||
- 在Topic详情页、Topic扩分区操作弹窗增加展示Topic所属Region的信息
|
||||
- 优化Topic审批时,Topic数据保存时间的配置流程
|
||||
- 优化Topic/应用申请、审批时的错误提示文案
|
||||
- 优化Topic数据采样的操作项文案
|
||||
- 优化运维人员删除Topic时的提示文案
|
||||
- 优化运维人员删除Region的删除逻辑与提示文案
|
||||
- 优化运维人员删除逻辑集群的提示文案
|
||||
- 优化上传集群配置文件时的文件类型限制条件
|
||||
|
||||
### bug修复
|
||||
|
||||
- 修复填写应用名称时校验特殊字符出错的问题
|
||||
- 修复普通用户越权访问应用详情的问题
|
||||
- 修复由于Kafka版本升级,导致的数据压缩格式无法获取的问题
|
||||
- 修复删除逻辑集群或Topic之后,界面依旧展示的问题
|
||||
- 修复进行Leader rebalance操作时执行结果重复提示的问题
|
||||
|
||||
|
||||
## v2.1.0
|
||||
|
||||
版本上线时间:2020-12-19
|
||||
|
||||
|
||||
|
||||
### 体验优化
|
||||
|
||||
- 优化页面加载时的背景样式
|
||||
- 优化普通用户申请Topic权限的流程
|
||||
- 优化Topic申请配额、申请分区的权限限制
|
||||
- 优化取消Topic权限的文案提示
|
||||
- 优化申请配额表单的表单项名称
|
||||
- 优化重置消费偏移的操作流程
|
||||
- 优化创建Topic迁移任务的表单内容
|
||||
- 优化Topic扩分区操作的弹窗界面样式
|
||||
- 优化集群Broker监控可视化图表样式
|
||||
- 优化创建逻辑集群的表单内容
|
||||
- 优化集群安全协议的提示文案
|
||||
|
||||
### bug修复
|
||||
|
||||
- 修复偶发性重置消费偏移失败的问题
|
||||
|
||||
|
||||
|
||||
|
||||
189
TROGDOR.md
189
TROGDOR.md
@@ -1,189 +0,0 @@
|
||||
Trogdor
|
||||
========================================
|
||||
Trogdor is a test framework for Apache Kafka.
|
||||
|
||||
Trogdor can run benchmarks and other workloads. Trogdor can also inject faults in order to stress test the system.
|
||||
|
||||
Quickstart
|
||||
=========================================================
|
||||
First, we want to start a single-node Kafka cluster with a ZooKeeper and a broker.
|
||||
|
||||
Running ZooKeeper:
|
||||
|
||||
> ./bin/zookeeper-server-start.sh ./config/zookeeper.properties &> /tmp/zookeeper.log &
|
||||
|
||||
Running Kafka:
|
||||
|
||||
> ./bin/kafka-server-start.sh ./config/server.properties &> /tmp/kafka.log &
|
||||
|
||||
Then, we want to run a Trogdor Agent, plus a Trogdor Coordinator.
|
||||
|
||||
To run the Trogdor Agent:
|
||||
|
||||
> ./bin/trogdor.sh agent -c ./config/trogdor.conf -n node0 &> /tmp/trogdor-agent.log &
|
||||
|
||||
To run the Trogdor Coordinator:
|
||||
|
||||
> ./bin/trogdor.sh coordinator -c ./config/trogdor.conf -n node0 &> /tmp/trogdor-coordinator.log &
|
||||
|
||||
Let's confirm that all of the daemons are running:
|
||||
|
||||
> jps
|
||||
116212 Coordinator
|
||||
115188 QuorumPeerMain
|
||||
116571 Jps
|
||||
115420 Kafka
|
||||
115694 Agent
|
||||
|
||||
Now, we can submit a test job to Trogdor.
|
||||
|
||||
> ./bin/trogdor.sh client createTask -t localhost:8889 -i produce0 --spec ./tests/spec/simple_produce_bench.json
|
||||
Sent CreateTaskRequest for task produce0.
|
||||
|
||||
We can run showTask to see what the task's status is:
|
||||
|
||||
> ./bin/trogdor.sh client showTask -t localhost:8889 -i produce0
|
||||
Task bar of type org.apache.kafka.trogdor.workload.ProduceBenchSpec is DONE. FINISHED at 2019-01-09T20:38:22.039-08:00 after 6s
|
||||
|
||||
To see the results, we use showTask with --show-status:
|
||||
|
||||
> ./bin/trogdor.sh client showTask -t localhost:8889 -i produce0 --show-status
|
||||
Task bar of type org.apache.kafka.trogdor.workload.ProduceBenchSpec is DONE. FINISHED at 2019-01-09T20:38:22.039-08:00 after 6s
|
||||
Status: {
|
||||
"totalSent" : 50000,
|
||||
"averageLatencyMs" : 17.83388,
|
||||
"p50LatencyMs" : 12,
|
||||
"p95LatencyMs" : 75,
|
||||
"p99LatencyMs" : 96,
|
||||
"transactionsCommitted" : 0
|
||||
}
|
||||
|
||||
Trogdor Architecture
|
||||
========================================
|
||||
Trogdor has a single coordinator process which manages multiple agent processes. Each agent process is responsible for a single cluster node.
|
||||
|
||||
The Trogdor coordinator manages tasks. A task is anything we might want to do on a cluster, such as running a benchmark, injecting a fault, or running a workload. In order to implement each task, the coordinator creates workers on one or more agent nodes.
|
||||
|
||||
The Trogdor agent process implements the tasks. For example, when running a workload, the agent process is the process which produces and consumes messages.
|
||||
|
||||
Both the coordinator and the agent expose a REST interface that accepts objects serialized via JSON. There is also a command-line program which makes it easy to send messages to either one without manually crafting the JSON message body.
|
||||
|
||||
All Trogdor RPCs are idempotent except the shutdown requests. Sending an idempotent RPC twice in a row has the same effect as sending the RPC once.
|
||||
|
||||
Tasks
|
||||
========================================
|
||||
Tasks are described by specifications containing:
|
||||
|
||||
* A "class" field describing the task type. This contains a full Java class name.
|
||||
* A "startMs" field describing when the task should start. This is given in terms of milliseconds since the UNIX epoch.
|
||||
* A "durationMs" field describing how long the task should last. This is given in terms of milliseconds.
|
||||
* Other fields which are task-specific.
|
||||
|
||||
The task specification is usually written as JSON. For example, this task specification describes a network partition between nodes 1 and 2, and 3:
|
||||
|
||||
{
|
||||
"class": "org.apache.kafka.trogdor.fault.NetworkPartitionFaultSpec",
|
||||
"startMs": 1000,
|
||||
"durationMs": 30000,
|
||||
"partitions": [["node1", "node2"], ["node3"]]
|
||||
}
|
||||
|
||||
This task runs a simple ProduceBench test on a cluster with one producer node, 5 topics, and 10,000 messages per second.
|
||||
The keys are generated sequentially and the configured partitioner (DefaultPartitioner) is used.
|
||||
|
||||
{
|
||||
"class": "org.apache.kafka.trogdor.workload.ProduceBenchSpec",
|
||||
"durationMs": 10000000,
|
||||
"producerNode": "node0",
|
||||
"bootstrapServers": "localhost:9092",
|
||||
"targetMessagesPerSec": 10000,
|
||||
"maxMessages": 50000,
|
||||
"activeTopics": {
|
||||
"foo[1-3]": {
|
||||
"numPartitions": 10,
|
||||
"replicationFactor": 1
|
||||
}
|
||||
},
|
||||
"inactiveTopics": {
|
||||
"foo[4-5]": {
|
||||
"numPartitions": 10,
|
||||
"replicationFactor": 1
|
||||
}
|
||||
},
|
||||
"keyGenerator": {
|
||||
"type": "sequential",
|
||||
"size": 8,
|
||||
"offset": 1
|
||||
},
|
||||
"useConfiguredPartitioner": true
|
||||
}
|
||||
|
||||
Tasks are submitted to the coordinator. Once the coordinator determines that it is time for the task to start, it creates workers on agent processes. The workers run until the task is done.
|
||||
|
||||
Task specifications are immutable; they do not change after the task has been created.
|
||||
|
||||
Tasks can be in several states:
|
||||
* PENDING, when task is waiting to execute,
|
||||
* RUNNING, when the task is running,
|
||||
* STOPPING, when the task is in the process of stopping,
|
||||
* DONE, when the task is done.
|
||||
|
||||
Tasks that are DONE also have an error field which will be set if the task failed.
|
||||
|
||||
Workloads
|
||||
========================================
|
||||
Trogdor can run several workloads. Workloads perform operations on the cluster and measure their performance. Workloads fail when the operations cannot be performed.
|
||||
|
||||
### ProduceBench
|
||||
ProduceBench starts a Kafka producer on a single agent node, producing to several partitions. The workload measures the average produce latency, as well as the median, 95th percentile, and 99th percentile latency.
|
||||
It can be configured to use a transactional producer which can commit transactions based on a set time interval or number of messages.
|
||||
|
||||
### RoundTripWorkload
|
||||
RoundTripWorkload tests both production and consumption. The workload starts a Kafka producer and consumer on a single node. The consumer will read back the messages that were produced by the producer.
|
||||
|
||||
### ConsumeBench
|
||||
ConsumeBench starts one or more Kafka consumers on a single agent node. Depending on the passed in configuration (see ConsumeBenchSpec), the consumers either subscribe to a set of topics (leveraging consumer group functionality and dynamic partition assignment) or manually assign partitions to themselves.
|
||||
The workload measures the average consume latency, as well as the median, 95th percentile, and 99th percentile latency.
|
||||
|
||||
Faults
|
||||
========================================
|
||||
Trogdor can run several faults which deliberately break something in the cluster.
|
||||
|
||||
### ProcessStopFault
|
||||
ProcessStopFault stops a process by sending it a SIGSTOP signal. When the fault ends, the process is resumed with SIGCONT.
|
||||
|
||||
### NetworkPartitionFault
|
||||
NetworkPartitionFault sets up an artificial network partition between one or more sets of nodes. Currently, this is implemented using iptables. The iptables rules are set up on the outbound traffic from the affected nodes. Therefore, the affected nodes should still be reachable from outside the cluster.
|
||||
|
||||
External Processes
|
||||
========================================
|
||||
Trogdor supports running arbitrary commands in external processes. This is a generic way to run any configurable command in the Trogdor framework - be it a Python program, bash script, docker image, etc.
|
||||
|
||||
### ExternalCommandWorker
|
||||
ExternalCommandWorker starts an external command defined by the ExternalCommandSpec. It essentially allows you to run any command on any Trogdor agent node.
|
||||
The worker communicates with the external process via its stdin, stdout and stderr in a JSON protocol. It uses stdout for any actionable communication and only logs what it sees in stderr.
|
||||
On startup the worker will first send a message describing the workload to the external process in this format:
|
||||
```
|
||||
{"id":<task ID string>, "workload":<configured workload JSON object>}
|
||||
```
|
||||
and will then listen for messages from the external process, again in a JSON format.
|
||||
Said JSON can contain the following fields:
|
||||
- status: If the object contains this field, the status of the worker will be set to the given value.
|
||||
- error: If the object contains this field, the error of the worker will be set to the given value. Once an error occurs, the external process will be terminated.
|
||||
- log: If the object contains this field, a log message will be issued with this text.
|
||||
An example:
|
||||
```json
|
||||
{"log": "Finished successfully.", "status": {"p99ProduceLatency": "100ms", "messagesSent": 10000}}
|
||||
```
|
||||
|
||||
Exec Mode
|
||||
========================================
|
||||
Sometimes, you just want to run a test quickly on a single node. In this case, you can use "exec mode." This mode allows you to run a single Trogdor Agent without a Coordinator.
|
||||
|
||||
When using exec mode, you must pass in a Task specification to use. The Agent will try to start this task.
|
||||
|
||||
For example:
|
||||
|
||||
> ./bin/trogdor.sh agent -n node0 -c ./config/trogdor.conf --exec ./tests/spec/simple_produce_bench.json
|
||||
|
||||
When using exec mode, the Agent will exit once the task is complete.
|
||||
199
Vagrantfile
vendored
199
Vagrantfile
vendored
@@ -1,199 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
require 'socket'
|
||||
|
||||
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
|
||||
VAGRANTFILE_API_VERSION = "2"
|
||||
|
||||
# General config
|
||||
enable_dns = false
|
||||
# Override to false when bringing up a cluster on AWS
|
||||
enable_hostmanager = true
|
||||
enable_jmx = false
|
||||
num_zookeepers = 1
|
||||
num_brokers = 3
|
||||
num_workers = 0 # Generic workers that get the code, but don't start any services
|
||||
ram_megabytes = 1280
|
||||
base_box = "ubuntu/trusty64"
|
||||
|
||||
# EC2
|
||||
ec2_access_key = ENV['AWS_ACCESS_KEY']
|
||||
ec2_secret_key = ENV['AWS_SECRET_KEY']
|
||||
ec2_session_token = ENV['AWS_SESSION_TOKEN']
|
||||
ec2_keypair_name = nil
|
||||
ec2_keypair_file = nil
|
||||
|
||||
ec2_region = "us-east-1"
|
||||
ec2_az = nil # Uses set by AWS
|
||||
ec2_ami = "ami-29ebb519"
|
||||
ec2_instance_type = "m3.medium"
|
||||
ec2_spot_instance = ENV['SPOT_INSTANCE'] ? ENV['SPOT_INSTANCE'] == 'true' : true
|
||||
ec2_spot_max_price = "0.113" # On-demand price for instance type
|
||||
ec2_user = "ubuntu"
|
||||
ec2_instance_name_prefix = "kafka-vagrant"
|
||||
ec2_security_groups = nil
|
||||
ec2_subnet_id = nil
|
||||
# Only override this by setting it to false if you're running in a VPC and you
|
||||
# are running Vagrant from within that VPC as well.
|
||||
ec2_associate_public_ip = nil
|
||||
|
||||
jdk_major = '8'
|
||||
jdk_full = '8u202-linux-x64'
|
||||
|
||||
local_config_file = File.join(File.dirname(__FILE__), "Vagrantfile.local")
|
||||
if File.exists?(local_config_file) then
|
||||
eval(File.read(local_config_file), binding, "Vagrantfile.local")
|
||||
end
|
||||
|
||||
# TODO(ksweeney): RAM requirements are not empirical and can probably be significantly lowered.
|
||||
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||
config.hostmanager.enabled = enable_hostmanager
|
||||
config.hostmanager.manage_host = enable_dns
|
||||
config.hostmanager.include_offline = false
|
||||
|
||||
## Provider-specific global configs
|
||||
config.vm.provider :virtualbox do |vb,override|
|
||||
override.vm.box = base_box
|
||||
|
||||
override.hostmanager.ignore_private_ip = false
|
||||
|
||||
# Brokers started with the standard script currently set Xms and Xmx to 1G,
|
||||
# plus we need some extra head room.
|
||||
vb.customize ["modifyvm", :id, "--memory", ram_megabytes.to_s]
|
||||
|
||||
if Vagrant.has_plugin?("vagrant-cachier")
|
||||
override.cache.scope = :box
|
||||
end
|
||||
end
|
||||
|
||||
config.vm.provider :aws do |aws,override|
|
||||
# The "box" is specified as an AMI
|
||||
override.vm.box = "dummy"
|
||||
override.vm.box_url = "https://github.com/mitchellh/vagrant-aws/raw/master/dummy.box"
|
||||
|
||||
cached_addresses = {}
|
||||
# Use a custom resolver that SSH's into the machine and finds the IP address
|
||||
# directly. This lets us get at the private IP address directly, avoiding
|
||||
# some issues with using the default IP resolver, which uses the public IP
|
||||
# address.
|
||||
override.hostmanager.ip_resolver = proc do |vm, resolving_vm|
|
||||
if !cached_addresses.has_key?(vm.name)
|
||||
state_id = vm.state.id
|
||||
if state_id != :not_created && state_id != :stopped && vm.communicate.ready?
|
||||
contents = ''
|
||||
vm.communicate.execute("/sbin/ifconfig eth0 | grep 'inet addr' | tail -n 1 | egrep -o '[0-9\.]+' | head -n 1 2>&1") do |type, data|
|
||||
contents << data
|
||||
end
|
||||
cached_addresses[vm.name] = contents.split("\n").first[/(\d+\.\d+\.\d+\.\d+)/, 1]
|
||||
else
|
||||
cached_addresses[vm.name] = nil
|
||||
end
|
||||
end
|
||||
cached_addresses[vm.name]
|
||||
end
|
||||
|
||||
override.ssh.username = ec2_user
|
||||
override.ssh.private_key_path = ec2_keypair_file
|
||||
|
||||
aws.access_key_id = ec2_access_key
|
||||
aws.secret_access_key = ec2_secret_key
|
||||
aws.session_token = ec2_session_token
|
||||
aws.keypair_name = ec2_keypair_name
|
||||
|
||||
aws.region = ec2_region
|
||||
aws.availability_zone = ec2_az
|
||||
aws.instance_type = ec2_instance_type
|
||||
aws.ami = ec2_ami
|
||||
aws.security_groups = ec2_security_groups
|
||||
aws.subnet_id = ec2_subnet_id
|
||||
# If a subnet is specified, default to turning on a public IP unless the
|
||||
# user explicitly specifies the option. Without a public IP, Vagrant won't
|
||||
# be able to SSH into the hosts unless Vagrant is also running in the VPC.
|
||||
if ec2_associate_public_ip.nil?
|
||||
aws.associate_public_ip = true unless ec2_subnet_id.nil?
|
||||
else
|
||||
aws.associate_public_ip = ec2_associate_public_ip
|
||||
end
|
||||
aws.region_config ec2_region do |region|
|
||||
region.spot_instance = ec2_spot_instance
|
||||
region.spot_max_price = ec2_spot_max_price
|
||||
end
|
||||
|
||||
# Exclude some directories that can grow very large from syncing
|
||||
override.vm.synced_folder ".", "/vagrant", type: "rsync", rsync__exclude: ['.git', 'core/data/', 'logs/', 'tests/results/', 'results/']
|
||||
end
|
||||
|
||||
def name_node(node, name, ec2_instance_name_prefix)
|
||||
node.vm.hostname = name
|
||||
node.vm.provider :aws do |aws|
|
||||
aws.tags = {
|
||||
'Name' => ec2_instance_name_prefix + "-" + Socket.gethostname + "-" + name,
|
||||
'JenkinsBuildUrl' => ENV['BUILD_URL']
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
def assign_local_ip(node, ip_address)
|
||||
node.vm.provider :virtualbox do |vb,override|
|
||||
override.vm.network :private_network, ip: ip_address
|
||||
end
|
||||
end
|
||||
|
||||
## Cluster definition
|
||||
zookeepers = []
|
||||
(1..num_zookeepers).each { |i|
|
||||
name = "zk" + i.to_s
|
||||
zookeepers.push(name)
|
||||
config.vm.define name do |zookeeper|
|
||||
name_node(zookeeper, name, ec2_instance_name_prefix)
|
||||
ip_address = "192.168.50." + (10 + i).to_s
|
||||
assign_local_ip(zookeeper, ip_address)
|
||||
zookeeper.vm.provision "shell", path: "vagrant/base.sh", env: {"JDK_MAJOR" => jdk_major, "JDK_FULL" => jdk_full}
|
||||
zk_jmx_port = enable_jmx ? (8000 + i).to_s : ""
|
||||
zookeeper.vm.provision "shell", path: "vagrant/zk.sh", :args => [i.to_s, num_zookeepers, zk_jmx_port]
|
||||
end
|
||||
}
|
||||
|
||||
(1..num_brokers).each { |i|
|
||||
name = "broker" + i.to_s
|
||||
config.vm.define name do |broker|
|
||||
name_node(broker, name, ec2_instance_name_prefix)
|
||||
ip_address = "192.168.50." + (50 + i).to_s
|
||||
assign_local_ip(broker, ip_address)
|
||||
# We need to be careful about what we list as the publicly routable
|
||||
# address since this is registered in ZK and handed out to clients. If
|
||||
# host DNS isn't setup, we shouldn't use hostnames -- IP addresses must be
|
||||
# used to support clients running on the host.
|
||||
zookeeper_connect = zookeepers.map{ |zk_addr| zk_addr + ":2181"}.join(",")
|
||||
broker.vm.provision "shell", path: "vagrant/base.sh", env: {"JDK_MAJOR" => jdk_major, "JDK_FULL" => jdk_full}
|
||||
kafka_jmx_port = enable_jmx ? (9000 + i).to_s : ""
|
||||
broker.vm.provision "shell", path: "vagrant/broker.sh", :args => [i.to_s, enable_dns ? name : ip_address, zookeeper_connect, kafka_jmx_port]
|
||||
end
|
||||
}
|
||||
|
||||
(1..num_workers).each { |i|
|
||||
name = "worker" + i.to_s
|
||||
config.vm.define name do |worker|
|
||||
name_node(worker, name, ec2_instance_name_prefix)
|
||||
ip_address = "192.168.50." + (100 + i).to_s
|
||||
assign_local_ip(worker, ip_address)
|
||||
worker.vm.provision "shell", path: "vagrant/base.sh", env: {"JDK_MAJOR" => jdk_major, "JDK_FULL" => jdk_full}
|
||||
end
|
||||
}
|
||||
|
||||
end
|
||||
@@ -1,45 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] connect-distributed.properties"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
base_dir=$(dirname $0)
|
||||
|
||||
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
|
||||
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
|
||||
fi
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G"
|
||||
fi
|
||||
|
||||
EXTRA_ARGS=${EXTRA_ARGS-'-name connectDistributed'}
|
||||
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-daemon)
|
||||
EXTRA_ARGS="-daemon "$EXTRA_ARGS
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectDistributed "$@"
|
||||
@@ -1,45 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] mm2.properties"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
base_dir=$(dirname $0)
|
||||
|
||||
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
|
||||
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
|
||||
fi
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G"
|
||||
fi
|
||||
|
||||
EXTRA_ARGS=${EXTRA_ARGS-'-name mirrorMaker'}
|
||||
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-daemon)
|
||||
EXTRA_ARGS="-daemon "$EXTRA_ARGS
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.mirror.MirrorMaker "$@"
|
||||
@@ -1,45 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] connect-standalone.properties"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
base_dir=$(dirname $0)
|
||||
|
||||
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
|
||||
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
|
||||
fi
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G"
|
||||
fi
|
||||
|
||||
EXTRA_ARGS=${EXTRA_ARGS-'-name connectStandalone'}
|
||||
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-daemon)
|
||||
EXTRA_ARGS="-daemon "$EXTRA_ARGS
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectStandalone "$@"
|
||||
1036
bin/init_es_template.sh
Normal file
1036
bin/init_es_template.sh
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.AclCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.BrokerApiVersionsCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConfigCommand "$@"
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleConsumer "$@"
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleProducer "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConsumerGroupCommand "$@"
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsumerPerformance "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.DelegationTokenCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.DeleteRecordsCommand "$@"
|
||||
@@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.DiskLoadProtectorCommand "$@"
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.DumpLogSegments "$@"
|
||||
@@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.KafkaExMetricsCommand "$@"
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.LeaderElectionCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.LogDirsCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.MirrorMaker "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.PreferredReplicaLeaderElectionCommand "$@"
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.ProducerPerformance "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplicaVerificationTool "$@"
|
||||
@@ -1,316 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# CYGWIN == 1 if Cygwin is detected, else 0.
|
||||
if [[ $(uname -a) =~ "CYGWIN" ]]; then
|
||||
CYGWIN=1
|
||||
else
|
||||
CYGWIN=0
|
||||
fi
|
||||
|
||||
if [ -z "$INCLUDE_TEST_JARS" ]; then
|
||||
INCLUDE_TEST_JARS=false
|
||||
fi
|
||||
|
||||
# Exclude jars not necessary for running commands.
|
||||
regex="(-(test|test-sources|src|scaladoc|javadoc)\.jar|jar.asc)$"
|
||||
should_include_file() {
|
||||
if [ "$INCLUDE_TEST_JARS" = true ]; then
|
||||
return 0
|
||||
fi
|
||||
file=$1
|
||||
if [ -z "$(echo "$file" | egrep "$regex")" ] ; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
base_dir=$(dirname $0)/..
|
||||
|
||||
if [ -z "$SCALA_VERSION" ]; then
|
||||
SCALA_VERSION=2.12.10
|
||||
fi
|
||||
|
||||
if [ -z "$SCALA_BINARY_VERSION" ]; then
|
||||
SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.')
|
||||
fi
|
||||
|
||||
# run ./gradlew copyDependantLibs to get all dependant jars in a local dir
|
||||
shopt -s nullglob
|
||||
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
|
||||
for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*;
|
||||
do
|
||||
CLASSPATH="$CLASSPATH:$dir/*"
|
||||
done
|
||||
fi
|
||||
|
||||
for file in "$base_dir"/examples/build/libs/kafka-examples*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
|
||||
clients_lib_dir=$(dirname $0)/../clients/build/libs
|
||||
streams_lib_dir=$(dirname $0)/../streams/build/libs
|
||||
streams_dependant_clients_lib_dir=$(dirname $0)/../streams/build/dependant-libs-${SCALA_VERSION}
|
||||
else
|
||||
clients_lib_dir=/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs
|
||||
streams_lib_dir=$clients_lib_dir
|
||||
streams_dependant_clients_lib_dir=$streams_lib_dir
|
||||
fi
|
||||
|
||||
|
||||
for file in "$clients_lib_dir"/kafka-clients*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
|
||||
for file in "$streams_lib_dir"/kafka-streams*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
|
||||
for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
else
|
||||
VERSION_NO_DOTS=`echo $UPGRADE_KAFKA_STREAMS_TEST_VERSION | sed 's/\.//g'`
|
||||
SHORT_VERSION_NO_DOTS=${VERSION_NO_DOTS:0:((${#VERSION_NO_DOTS} - 1))} # remove last char, ie, bug-fix number
|
||||
for file in "$base_dir"/streams/upgrade-system-tests-$SHORT_VERSION_NO_DOTS/build/libs/kafka-streams-upgrade-system-tests*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$file":"$CLASSPATH"
|
||||
fi
|
||||
done
|
||||
if [ "$SHORT_VERSION_NO_DOTS" = "0100" ]; then
|
||||
CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.8.jar":"$CLASSPATH"
|
||||
CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.6.jar":"$CLASSPATH"
|
||||
fi
|
||||
if [ "$SHORT_VERSION_NO_DOTS" = "0101" ]; then
|
||||
CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.9.jar":"$CLASSPATH"
|
||||
CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.8.jar":"$CLASSPATH"
|
||||
fi
|
||||
fi
|
||||
|
||||
for file in "$streams_dependant_clients_lib_dir"/rocksdb*.jar;
|
||||
do
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
done
|
||||
|
||||
for file in "$streams_dependant_clients_lib_dir"/*hamcrest*.jar;
|
||||
do
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
done
|
||||
|
||||
for file in "$base_dir"/tools/build/libs/kafka-tools*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
|
||||
for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*;
|
||||
do
|
||||
CLASSPATH="$CLASSPATH:$dir/*"
|
||||
done
|
||||
|
||||
for cc_pkg in "api" "transforms" "runtime" "file" "mirror" "mirror-client" "json" "tools" "basic-auth-extension"
|
||||
do
|
||||
for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then
|
||||
CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*"
|
||||
fi
|
||||
done
|
||||
|
||||
# classpath addition for release
|
||||
for file in "$base_dir"/libs/*;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
|
||||
for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar;
|
||||
do
|
||||
if should_include_file "$file"; then
|
||||
CLASSPATH="$CLASSPATH":"$file"
|
||||
fi
|
||||
done
|
||||
shopt -u nullglob
|
||||
|
||||
if [ -z "$CLASSPATH" ] ; then
|
||||
echo "Classpath is empty. Please build the project first e.g. by running './gradlew jar -PscalaVersion=$SCALA_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# JMX settings
|
||||
if [ -z "$KAFKA_JMX_OPTS" ]; then
|
||||
KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
|
||||
fi
|
||||
|
||||
# JMX port to use
|
||||
if [ $JMX_PORT ]; then
|
||||
KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT"
|
||||
fi
|
||||
|
||||
# Log directory to use
|
||||
if [ "x$LOG_DIR" = "x" ]; then
|
||||
LOG_DIR="$base_dir/logs"
|
||||
fi
|
||||
|
||||
# Log4j settings
|
||||
if [ -z "$KAFKA_LOG4J_OPTS" ]; then
|
||||
# Log to console. This is a tool.
|
||||
LOG4J_DIR="$base_dir/config/tools-log4j.properties"
|
||||
# If Cygwin is detected, LOG4J_DIR is converted to Windows format.
|
||||
(( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}")
|
||||
KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}"
|
||||
else
|
||||
# create logs directory
|
||||
if [ ! -d "$LOG_DIR" ]; then
|
||||
mkdir -p "$LOG_DIR"
|
||||
fi
|
||||
fi
|
||||
|
||||
# If Cygwin is detected, LOG_DIR is converted to Windows format.
|
||||
(( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}")
|
||||
KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR $KAFKA_LOG4J_OPTS"
|
||||
|
||||
# Generic jvm settings you want to add
|
||||
if [ -z "$KAFKA_OPTS" ]; then
|
||||
KAFKA_OPTS=""
|
||||
fi
|
||||
|
||||
# Set Debug options if enabled
|
||||
if [ "x$KAFKA_DEBUG" != "x" ]; then
|
||||
|
||||
# Use default ports
|
||||
DEFAULT_JAVA_DEBUG_PORT="5005"
|
||||
|
||||
if [ -z "$JAVA_DEBUG_PORT" ]; then
|
||||
JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT"
|
||||
fi
|
||||
|
||||
# Use the defaults if JAVA_DEBUG_OPTS was not set
|
||||
DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=0.0.0.0:$JAVA_DEBUG_PORT"
|
||||
if [ -z "$JAVA_DEBUG_OPTS" ]; then
|
||||
JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS"
|
||||
fi
|
||||
|
||||
echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
|
||||
KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
|
||||
fi
|
||||
|
||||
# Which java to use
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
JAVA="java"
|
||||
else
|
||||
JAVA="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
|
||||
# Memory options
|
||||
if [ -z "$KAFKA_HEAP_OPTS" ]; then
|
||||
KAFKA_HEAP_OPTS="-Xmx256M"
|
||||
fi
|
||||
|
||||
# JVM performance options
|
||||
# MaxInlineLevel=15 is the default since JDK 14 and can be removed once older JDKs are no longer supported
|
||||
if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
|
||||
KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:InitiatingHeapOccupancyPercent=35 -XX:G1HeapRegionSize=16m -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15 -Djava.awt.headless=true"
|
||||
fi
|
||||
|
||||
while [ $# -gt 0 ]; do
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-name)
|
||||
DAEMON_NAME=$2
|
||||
CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
|
||||
shift 2
|
||||
;;
|
||||
-loggc)
|
||||
if [ -z "$KAFKA_GC_LOG_OPTS" ]; then
|
||||
GC_LOG_ENABLED="true"
|
||||
fi
|
||||
shift
|
||||
;;
|
||||
-daemon)
|
||||
DAEMON_MODE="true"
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
break
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# GC options
|
||||
GC_FILE_SUFFIX='-gc.log'
|
||||
GC_LOG_FILE_NAME=''
|
||||
if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
|
||||
GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX
|
||||
|
||||
# The first segment of the version number, which is '1' for releases before Java 9
|
||||
# it then becomes '9', '10', ...
|
||||
# Some examples of the first line of `java --version`:
|
||||
# 8 -> java version "1.8.0_152"
|
||||
# 9.0.4 -> java version "9.0.4"
|
||||
# 10 -> java version "10" 2018-03-20
|
||||
# 10.0.1 -> java version "10.0.1" 2018-04-17
|
||||
# We need to match to the end of the line to prevent sed from printing the characters that do not match
|
||||
JAVA_MAJOR_VERSION=$($JAVA -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
|
||||
if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then
|
||||
KAFKA_GC_LOG_OPTS="-Xlog:gc*:file=$LOG_DIR/$GC_LOG_FILE_NAME:time"
|
||||
else
|
||||
KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Remove a possible colon prefix from the classpath (happens at lines like `CLASSPATH="$CLASSPATH:$file"` when CLASSPATH is blank)
|
||||
# Syntax used on the right side is native Bash string manipulation; for more details see
|
||||
# http://tldp.org/LDP/abs/html/string-manipulation.html, specifically the section titled "Substring Removal"
|
||||
CLASSPATH=${CLASSPATH#:}
|
||||
|
||||
# If Cygwin is detected, classpath is converted to Windows format.
|
||||
(( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")
|
||||
|
||||
# Launch mode
|
||||
if [ "x$DAEMON_MODE" = "xtrue" ]; then
|
||||
nohup $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
|
||||
else
|
||||
exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@"
|
||||
fi
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] server.properties [--override property=value]*"
|
||||
exit 1
|
||||
fi
|
||||
base_dir=$(dirname $0)
|
||||
|
||||
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
|
||||
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
|
||||
fi
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx8G -Xms8G"
|
||||
export JMX_PORT=8099
|
||||
#export KAFKA_DEBUG=debug
|
||||
#export DAEMON_MODE=true
|
||||
export KAFKA_OPTS="-Djava.security.auth.login.config=$base_dir/../config/kafka_server_jaas.conf"
|
||||
export DEBUG_SUSPEND_FLAG="n"
|
||||
export JAVA_DEBUG_PORT="8096"
|
||||
export GC_LOG_ENABLED=true
|
||||
fi
|
||||
|
||||
EXTRA_ARGS=${EXTRA_ARGS-'-name kafkaServer -loggc'}
|
||||
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-daemon)
|
||||
EXTRA_ARGS="-daemon "$EXTRA_ARGS
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
exec $base_dir/kafka-run-class.sh $EXTRA_ARGS kafka.ServiceDiscovery "$@"
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
SIGNAL=${SIGNAL:-TERM}
|
||||
PIDS=$(ps ax | grep -i 'kafka\.ServiceDiscovery' | grep java | grep -v grep | awk '{print $1}')
|
||||
|
||||
if [ -z "$PIDS" ]; then
|
||||
echo "No kafka server to stop"
|
||||
exit 1
|
||||
else
|
||||
kill -s $SIGNAL $PIDS
|
||||
fi
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.tools.StreamsResetter "$@"
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand "$@"
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableConsumer "$@"
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M"
|
||||
fi
|
||||
exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableProducer "$@"
|
||||
16
bin/shutdown.sh
Normal file
16
bin/shutdown.sh
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd `dirname $0`/../libs
|
||||
target_dir=`pwd`
|
||||
|
||||
pid=`ps ax | grep -i 'ks-km' | grep ${target_dir} | grep java | grep -v grep | awk '{print $1}'`
|
||||
if [ -z "$pid" ] ; then
|
||||
echo "No ks-km running."
|
||||
exit -1;
|
||||
fi
|
||||
|
||||
echo "The ks-km (${pid}) is running..."
|
||||
|
||||
kill ${pid}
|
||||
|
||||
echo "Send shutdown request to ks-km (${pid}) OK"
|
||||
82
bin/startup.sh
Normal file
82
bin/startup.sh
Normal file
@@ -0,0 +1,82 @@
|
||||
error_exit ()
|
||||
{
|
||||
echo "ERROR: $1 !!"
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ ! -e "$JAVA_HOME/bin/java" ] && JAVA_HOME=$HOME/jdk/java
|
||||
[ ! -e "$JAVA_HOME/bin/java" ] && JAVA_HOME=/usr/java
|
||||
[ ! -e "$JAVA_HOME/bin/java" ] && unset JAVA_HOME
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ "Darwin" = "$(uname -s)" ]; then
|
||||
|
||||
if [ -x '/usr/libexec/java_home' ] ; then
|
||||
export JAVA_HOME=`/usr/libexec/java_home`
|
||||
|
||||
elif [ -d "/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home" ]; then
|
||||
export JAVA_HOME="/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home"
|
||||
fi
|
||||
else
|
||||
JAVA_PATH=`dirname $(readlink -f $(which javac))`
|
||||
if [ "x$JAVA_PATH" != "x" ]; then
|
||||
export JAVA_HOME=`dirname $JAVA_PATH 2>/dev/null`
|
||||
fi
|
||||
fi
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
error_exit "Please set the JAVA_HOME variable in your environment, We need java(x64)! jdk8 or later is better!"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
|
||||
|
||||
export WEB_SERVER="ks-km"
|
||||
export JAVA_HOME
|
||||
export JAVA="$JAVA_HOME/bin/java"
|
||||
export BASE_DIR=`cd $(dirname $0)/..; pwd`
|
||||
export CUSTOM_SEARCH_LOCATIONS=file:${BASE_DIR}/conf/
|
||||
|
||||
|
||||
#===========================================================================================
|
||||
# JVM Configuration
|
||||
#===========================================================================================
|
||||
|
||||
JAVA_OPT="${JAVA_OPT} -server -Xms2g -Xmx2g -Xmn1g -XX:MetaspaceSize=128m -XX:MaxMetaspaceSize=320m"
|
||||
JAVA_OPT="${JAVA_OPT} -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${BASE_DIR}/logs/java_heapdump.hprof"
|
||||
|
||||
## jdk版本高的情况 有些 参数废弃了
|
||||
JAVA_MAJOR_VERSION=$($JAVA -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
|
||||
if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then
|
||||
JAVA_OPT="${JAVA_OPT} -Xlog:gc*:file=${BASE_DIR}/logs/km_gc.log:time,tags:filecount=10,filesize=102400"
|
||||
else
|
||||
JAVA_OPT="${JAVA_OPT} -Djava.ext.dirs=${JAVA_HOME}/jre/lib/ext:${JAVA_HOME}/lib/ext"
|
||||
JAVA_OPT="${JAVA_OPT} -Xloggc:${BASE_DIR}/logs/km_gc.log -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
|
||||
|
||||
fi
|
||||
|
||||
JAVA_OPT="${JAVA_OPT} -jar ${BASE_DIR}/libs/${WEB_SERVER}.jar"
|
||||
JAVA_OPT="${JAVA_OPT} --spring.config.additional-location=${CUSTOM_SEARCH_LOCATIONS}"
|
||||
JAVA_OPT="${JAVA_OPT} --logging.config=${BASE_DIR}/conf/logback-spring.xml"
|
||||
JAVA_OPT="${JAVA_OPT} --server.max-http-header-size=524288"
|
||||
|
||||
|
||||
|
||||
if [ ! -d "${BASE_DIR}/logs" ]; then
|
||||
mkdir ${BASE_DIR}/logs
|
||||
fi
|
||||
|
||||
echo "$JAVA ${JAVA_OPT}"
|
||||
|
||||
# check the start.out log output file
|
||||
if [ ! -f "${BASE_DIR}/logs/start.out" ]; then
|
||||
touch "${BASE_DIR}/logs/start.out"
|
||||
fi
|
||||
|
||||
# start
|
||||
echo -e "---- 启动脚本 ------\n $JAVA ${JAVA_OPT}" > ${BASE_DIR}/logs/start.out 2>&1 &
|
||||
|
||||
|
||||
nohup $JAVA ${JAVA_OPT} >> ${BASE_DIR}/logs/start.out 2>&1 &
|
||||
|
||||
echo "${WEB_SERVER} is starting,you can check the ${BASE_DIR}/logs/start.out"
|
||||
@@ -1,50 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
The Trogdor fault injector.
|
||||
|
||||
Usage:
|
||||
$0 [action] [options]
|
||||
|
||||
Actions:
|
||||
agent: Run the trogdor agent.
|
||||
coordinator: Run the trogdor coordinator.
|
||||
client: Run the client which communicates with the trogdor coordinator.
|
||||
agent-client: Run the client which communicates with the trogdor agent.
|
||||
help: This help message.
|
||||
EOF
|
||||
}
|
||||
|
||||
if [[ $# -lt 1 ]]; then
|
||||
usage
|
||||
exit 0
|
||||
fi
|
||||
action="${1}"
|
||||
shift
|
||||
CLASS=""
|
||||
case ${action} in
|
||||
agent) CLASS="org.apache.kafka.trogdor.agent.Agent";;
|
||||
coordinator) CLASS="org.apache.kafka.trogdor.coordinator.Coordinator";;
|
||||
client) CLASS="org.apache.kafka.trogdor.coordinator.CoordinatorClient";;
|
||||
agent-client) CLASS="org.apache.kafka.trogdor.agent.AgentClient";;
|
||||
help) usage; exit 0;;
|
||||
*) echo "Unknown action '${action}'. Type '$0 help' for help."; exit 1;;
|
||||
esac
|
||||
|
||||
export INCLUDE_TEST_JARS=1
|
||||
exec $(dirname $0)/kafka-run-class.sh "${CLASS}" "$@"
|
||||
@@ -1,34 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 connect-distributed.properties
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
SetLocal
|
||||
rem Using pushd popd to set BASE_DIR to the absolute path
|
||||
pushd %~dp0..\..
|
||||
set BASE_DIR=%CD%
|
||||
popd
|
||||
|
||||
rem Log4j settings
|
||||
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
|
||||
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j.properties
|
||||
)
|
||||
|
||||
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %*
|
||||
EndLocal
|
||||
@@ -1,34 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 connect-standalone.properties
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
SetLocal
|
||||
rem Using pushd popd to set BASE_DIR to the absolute path
|
||||
pushd %~dp0..\..
|
||||
set BASE_DIR=%CD%
|
||||
popd
|
||||
|
||||
rem Log4j settings
|
||||
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
|
||||
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j.properties
|
||||
)
|
||||
|
||||
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.AclCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
%~dp0kafka-run-class.bat kafka.admin.BrokerApiVersionsCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.ConfigCommand %*
|
||||
@@ -1,20 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
SetLocal
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.ConsoleConsumer %*
|
||||
EndLocal
|
||||
@@ -1,20 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
SetLocal
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.ConsoleProducer %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.ConsumerGroupCommand %*
|
||||
@@ -1,20 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
SetLocal
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.ConsumerPerformance %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.DelegationTokenCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.DeleteRecordsCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.DumpLogSegments %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.LeaderElectionCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.LogDirsCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.MirrorMaker %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.PreferredReplicaLeaderElectionCommand %*
|
||||
@@ -1,20 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
SetLocal
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M
|
||||
"%~dp0kafka-run-class.bat" org.apache.kafka.tools.ProducerPerformance %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.ReassignPartitionsCommand %*
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.ReplicaVerificationTool %*
|
||||
@@ -1,191 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 classname [opts]
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
rem Using pushd popd to set BASE_DIR to the absolute path
|
||||
pushd %~dp0..\..
|
||||
set BASE_DIR=%CD%
|
||||
popd
|
||||
|
||||
IF ["%SCALA_VERSION%"] EQU [""] (
|
||||
set SCALA_VERSION=2.12.10
|
||||
)
|
||||
|
||||
IF ["%SCALA_BINARY_VERSION%"] EQU [""] (
|
||||
for /f "tokens=1,2 delims=." %%a in ("%SCALA_VERSION%") do (
|
||||
set FIRST=%%a
|
||||
set SECOND=%%b
|
||||
if ["!SECOND!"] EQU [""] (
|
||||
set SCALA_BINARY_VERSION=!FIRST!
|
||||
) else (
|
||||
set SCALA_BINARY_VERSION=!FIRST!.!SECOND!
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka-core dependencies
|
||||
for %%i in ("%BASE_DIR%\core\build\dependant-libs-%SCALA_VERSION%\*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka-examples
|
||||
for %%i in ("%BASE_DIR%\examples\build\libs\kafka-examples*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka-clients
|
||||
for %%i in ("%BASE_DIR%\clients\build\libs\kafka-clients*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka-streams
|
||||
for %%i in ("%BASE_DIR%\streams\build\libs\kafka-streams*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka-streams-examples
|
||||
for %%i in ("%BASE_DIR%\streams\examples\build\libs\kafka-streams-examples*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
for %%i in ("%BASE_DIR%\streams\build\dependant-libs-%SCALA_VERSION%\rocksdb*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for kafka tools
|
||||
for %%i in ("%BASE_DIR%\tools\build\libs\kafka-tools*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
for %%i in ("%BASE_DIR%\tools\build\dependant-libs-%SCALA_VERSION%\*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
for %%p in (api runtime file json tools) do (
|
||||
for %%i in ("%BASE_DIR%\connect\%%p\build\libs\connect-%%p*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
if exist "%BASE_DIR%\connect\%%p\build\dependant-libs\*" (
|
||||
call :concat "%BASE_DIR%\connect\%%p\build\dependant-libs\*"
|
||||
)
|
||||
)
|
||||
|
||||
rem Classpath addition for release
|
||||
for %%i in ("%BASE_DIR%\libs\*") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem Classpath addition for core
|
||||
for %%i in ("%BASE_DIR%\core\build\libs\kafka_%SCALA_BINARY_VERSION%*.jar") do (
|
||||
call :concat "%%i"
|
||||
)
|
||||
|
||||
rem JMX settings
|
||||
IF ["%KAFKA_JMX_OPTS%"] EQU [""] (
|
||||
set KAFKA_JMX_OPTS=-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false
|
||||
)
|
||||
|
||||
rem JMX port to use
|
||||
IF ["%JMX_PORT%"] NEQ [""] (
|
||||
set KAFKA_JMX_OPTS=%KAFKA_JMX_OPTS% -Dcom.sun.management.jmxremote.port=%JMX_PORT%
|
||||
)
|
||||
|
||||
rem Log directory to use
|
||||
IF ["%LOG_DIR%"] EQU [""] (
|
||||
set LOG_DIR=%BASE_DIR%/logs
|
||||
)
|
||||
|
||||
rem Log4j settings
|
||||
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
|
||||
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j.properties
|
||||
) ELSE (
|
||||
rem create logs directory
|
||||
IF not exist "%LOG_DIR%" (
|
||||
mkdir "%LOG_DIR%"
|
||||
)
|
||||
)
|
||||
|
||||
set KAFKA_LOG4J_OPTS=-Dkafka.logs.dir="%LOG_DIR%" "%KAFKA_LOG4J_OPTS%"
|
||||
|
||||
rem Generic jvm settings you want to add
|
||||
IF ["%KAFKA_OPTS%"] EQU [""] (
|
||||
set KAFKA_OPTS=
|
||||
)
|
||||
|
||||
set DEFAULT_JAVA_DEBUG_PORT=5005
|
||||
set DEFAULT_DEBUG_SUSPEND_FLAG=n
|
||||
rem Set Debug options if enabled
|
||||
IF ["%KAFKA_DEBUG%"] NEQ [""] (
|
||||
|
||||
|
||||
IF ["%JAVA_DEBUG_PORT%"] EQU [""] (
|
||||
set JAVA_DEBUG_PORT=%DEFAULT_JAVA_DEBUG_PORT%
|
||||
)
|
||||
|
||||
IF ["%DEBUG_SUSPEND_FLAG%"] EQU [""] (
|
||||
set DEBUG_SUSPEND_FLAG=%DEFAULT_DEBUG_SUSPEND_FLAG%
|
||||
)
|
||||
set DEFAULT_JAVA_DEBUG_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=!DEBUG_SUSPEND_FLAG!,address=!JAVA_DEBUG_PORT!
|
||||
|
||||
IF ["%JAVA_DEBUG_OPTS%"] EQU [""] (
|
||||
set JAVA_DEBUG_OPTS=!DEFAULT_JAVA_DEBUG_OPTS!
|
||||
)
|
||||
|
||||
echo Enabling Java debug options: !JAVA_DEBUG_OPTS!
|
||||
set KAFKA_OPTS=!JAVA_DEBUG_OPTS! !KAFKA_OPTS!
|
||||
)
|
||||
|
||||
rem Which java to use
|
||||
IF ["%JAVA_HOME%"] EQU [""] (
|
||||
set JAVA=java
|
||||
) ELSE (
|
||||
set JAVA="%JAVA_HOME%/bin/java"
|
||||
)
|
||||
|
||||
rem Memory options
|
||||
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
|
||||
set KAFKA_HEAP_OPTS=-Xmx256M
|
||||
)
|
||||
|
||||
rem JVM performance options
|
||||
IF ["%KAFKA_JVM_PERFORMANCE_OPTS%"] EQU [""] (
|
||||
set KAFKA_JVM_PERFORMANCE_OPTS=-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -Djava.awt.headless=true
|
||||
)
|
||||
|
||||
IF not defined CLASSPATH (
|
||||
echo Classpath is empty. Please build the project first e.g. by running 'gradlew jarAll'
|
||||
EXIT /B 2
|
||||
)
|
||||
|
||||
set COMMAND=%JAVA% %KAFKA_HEAP_OPTS% %KAFKA_JVM_PERFORMANCE_OPTS% %KAFKA_JMX_OPTS% %KAFKA_LOG4J_OPTS% -cp "%CLASSPATH%" %KAFKA_OPTS% %*
|
||||
rem echo.
|
||||
rem echo %COMMAND%
|
||||
rem echo.
|
||||
%COMMAND%
|
||||
|
||||
goto :eof
|
||||
:concat
|
||||
IF not defined CLASSPATH (
|
||||
set CLASSPATH="%~1"
|
||||
) ELSE (
|
||||
set CLASSPATH=%CLASSPATH%;"%~1"
|
||||
)
|
||||
@@ -1,38 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 server.properties
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
SetLocal
|
||||
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
|
||||
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties
|
||||
)
|
||||
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
|
||||
rem detect OS architecture
|
||||
wmic os get osarchitecture | find /i "32-bit" >nul 2>&1
|
||||
IF NOT ERRORLEVEL 1 (
|
||||
rem 32-bit OS
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M
|
||||
) ELSE (
|
||||
rem 64-bit OS
|
||||
set KAFKA_HEAP_OPTS=-Xmx1G -Xms1G
|
||||
)
|
||||
)
|
||||
"%~dp0kafka-run-class.bat" kafka.Kafka %*
|
||||
EndLocal
|
||||
@@ -1,18 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
wmic process where (commandline like "%%kafka.Kafka%%" and not name="wmic.exe") delete
|
||||
rem ps ax | grep -i 'kafka.Kafka' | grep -v grep | awk '{print $1}' | xargs kill -SIGTERM
|
||||
@@ -1,23 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
SetLocal
|
||||
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M
|
||||
)
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.tools.StreamsResetter %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
"%~dp0kafka-run-class.bat" kafka.admin.TopicCommand %*
|
||||
@@ -1,30 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 zookeeper.properties
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
SetLocal
|
||||
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
|
||||
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties
|
||||
)
|
||||
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
|
||||
set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M
|
||||
)
|
||||
"%~dp0kafka-run-class.bat" org.apache.zookeeper.server.quorum.QuorumPeerMain %*
|
||||
EndLocal
|
||||
@@ -1,17 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
wmic process where (commandline like "%%zookeeper%%" and not name="wmic.exe") delete
|
||||
@@ -1,22 +0,0 @@
|
||||
@echo off
|
||||
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
rem contributor license agreements. See the NOTICE file distributed with
|
||||
rem this work for additional information regarding copyright ownership.
|
||||
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
rem (the "License"); you may not use this file except in compliance with
|
||||
rem the License. You may obtain a copy of the License at
|
||||
rem
|
||||
rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
rem
|
||||
rem Unless required by applicable law or agreed to in writing, software
|
||||
rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
rem See the License for the specific language governing permissions and
|
||||
rem limitations under the License.
|
||||
|
||||
IF [%1] EQU [] (
|
||||
echo USAGE: %0 zookeeper_host:port[/path] [-zk-tls-config-file file] [args...]
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
"%~dp0kafka-run-class.bat" org.apache.zookeeper.ZooKeeperMainWithTlsSupportForKafka -server %*
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh kafka.admin.ZkSecurityMigrator "$@"
|
||||
@@ -1,44 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 [-daemon] zookeeper.properties"
|
||||
exit 1
|
||||
fi
|
||||
base_dir=$(dirname $0)
|
||||
|
||||
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
|
||||
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
|
||||
fi
|
||||
|
||||
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
|
||||
export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M"
|
||||
fi
|
||||
|
||||
EXTRA_ARGS=${EXTRA_ARGS-'-name zookeeper -loggc'}
|
||||
|
||||
COMMAND=$1
|
||||
case $COMMAND in
|
||||
-daemon)
|
||||
EXTRA_ARGS="-daemon "$EXTRA_ARGS
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
exec $base_dir/kafka-run-class.sh $EXTRA_ARGS org.apache.zookeeper.server.quorum.QuorumPeerMain "$@"
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
SIGNAL=${SIGNAL:-TERM}
|
||||
PIDS=$(ps ax | grep java | grep -i QuorumPeerMain | grep -v grep | awk '{print $1}')
|
||||
|
||||
if [ -z "$PIDS" ]; then
|
||||
echo "No zookeeper server to stop"
|
||||
exit 1
|
||||
else
|
||||
kill -s $SIGNAL $PIDS
|
||||
fi
|
||||
@@ -1,23 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 1 ];
|
||||
then
|
||||
echo "USAGE: $0 zookeeper_host:port[/path] [-zk-tls-config-file file] [args...]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exec $(dirname $0)/kafka-run-class.sh org.apache.zookeeper.ZooKeeperMainWithTlsSupportForKafka -server "$@"
|
||||
1202
build.gradle
1202
build.gradle
File diff suppressed because it is too large
Load Diff
83
build.sh
83
build.sh
@@ -1,83 +0,0 @@
|
||||
#!/bin/bash
|
||||
workspace=$(cd $(dirname $0) && pwd -P)
|
||||
cd $workspace
|
||||
|
||||
## TODO const
|
||||
APPNAME=service-discovery
|
||||
module=$APPNAME
|
||||
app=$module
|
||||
|
||||
gitversion=.gitversion
|
||||
control=./control.sh
|
||||
ngxfunc=./nginxfunc.sh
|
||||
|
||||
## function
|
||||
function build() {
|
||||
# 进行编译
|
||||
# cmd
|
||||
JVERSION=`java -version 2>&1 | awk 'NR==1{gsub(/"/,"");print $3}'`
|
||||
major=`echo $JVERSION | awk -F. '{print $1}'`
|
||||
mijor=`echo $JVERSION | awk -F. '{print $2}'`
|
||||
if [ $major -le 1 ] && [ $mijor -lt 11 ]; then
|
||||
export JAVA_HOME=/usr/local/jdk-11.0.2 #(使用jdk11请设置)
|
||||
export PATH=$JAVA_HOME/bin:$PATH
|
||||
fi
|
||||
# XXX 编译命令
|
||||
# mvn clean install -Ponline -Dmaven.test.skip=true -f ../pom.xml
|
||||
./gradlew -PscalaVersion=2.12 releaseTarGz
|
||||
|
||||
local sc=$?
|
||||
if [ $sc -ne 0 ];then
|
||||
## 编译失败, 退出码为 非0
|
||||
echo "$app build error"
|
||||
exit $sc
|
||||
else
|
||||
echo -n "$app build ok, vsn="
|
||||
gitversion
|
||||
fi
|
||||
}
|
||||
|
||||
function make_output() {
|
||||
# 新建output目录
|
||||
local output="./output"
|
||||
rm -rf $output &>/dev/null
|
||||
mkdir -p $output &>/dev/null
|
||||
|
||||
# 填充output目录, output内的内容 即为 线上部署内容
|
||||
(
|
||||
cp -rf $control $output && # 拷贝 control.sh脚本 至output目录
|
||||
cp -rf $ngxfunc $output &&
|
||||
cp -rf ./APP_META $output &&
|
||||
cp -rf ./APP_META/Dockerfile $output &&
|
||||
# XXX 解压程序包到output路径
|
||||
tar -xzvf core/build/distributions/kafka_2.12-sd-2.5.0-d-100.tgz
|
||||
mv kafka_2.12-sd-2.5.0-d-100 ${output}/service-discovery
|
||||
# unzip target/${module}.war -d ${output} && # 解压war包到output目录
|
||||
echo -e "make output ok."
|
||||
) || { echo -e "make output error"; exit 2; } # 填充output目录失败后, 退出码为 非0
|
||||
}
|
||||
|
||||
## internals
|
||||
function gitversion() {
|
||||
git log -1 --pretty=%h > $gitversion
|
||||
local gv=`cat $gitversion`
|
||||
echo "$gv"
|
||||
}
|
||||
|
||||
|
||||
##########################################
|
||||
## main
|
||||
## 其中,
|
||||
## 1.进行编译
|
||||
## 2.生成部署包output
|
||||
##########################################
|
||||
|
||||
# 1.进行编译
|
||||
build
|
||||
|
||||
# 2.生成部署包output
|
||||
make_output
|
||||
|
||||
# 编译成功
|
||||
echo -e "build done"
|
||||
exit 0
|
||||
@@ -1,20 +0,0 @@
|
||||
// Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
// contributor license agreements. See the NOTICE file distributed with
|
||||
// this work for additional information regarding copyright ownership.
|
||||
// The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
// (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
docstrings = JavaDoc
|
||||
maxColumn = 120
|
||||
continuationIndent.defnSite = 2
|
||||
assumeStandardLibraryStripMargin = true
|
||||
danglingParentheses = true
|
||||
rewrite.rules = [SortImports, RedundantBraces, RedundantParens, SortModifiers]
|
||||
@@ -1,142 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC
|
||||
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
<!--
|
||||
// Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
// contributor license agreements. See the NOTICE file distributed with
|
||||
// this work for additional information regarding copyright ownership.
|
||||
// The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
// (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
-->
|
||||
<module name="Checker">
|
||||
<property name="localeLanguage" value="en"/>
|
||||
|
||||
<module name="FileTabCharacter"/>
|
||||
|
||||
<!-- header -->
|
||||
<module name="Header">
|
||||
<property name="headerFile" value="${headerFile}" />
|
||||
</module>
|
||||
|
||||
<module name="TreeWalker">
|
||||
|
||||
<!-- code cleanup -->
|
||||
<module name="UnusedImports">
|
||||
<property name="processJavadoc" value="true" />
|
||||
</module>
|
||||
<module name="RedundantImport"/>
|
||||
<module name="IllegalImport" />
|
||||
<module name="EqualsHashCode"/>
|
||||
<module name="SimplifyBooleanExpression"/>
|
||||
<module name="OneStatementPerLine"/>
|
||||
<module name="UnnecessaryParentheses" />
|
||||
<module name="SimplifyBooleanReturn"/>
|
||||
|
||||
<!-- style -->
|
||||
<module name="DefaultComesLast"/>
|
||||
<module name="EmptyStatement"/>
|
||||
<module name="ArrayTypeStyle"/>
|
||||
<module name="UpperEll"/>
|
||||
<module name="LeftCurly"/>
|
||||
<module name="RightCurly"/>
|
||||
<module name="EmptyStatement"/>
|
||||
<module name="ConstantName">
|
||||
<property name="format" value="(^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$)|(^log$)"/>
|
||||
</module>
|
||||
<module name="LocalVariableName"/>
|
||||
<module name="LocalFinalVariableName"/>
|
||||
<module name="MemberName"/>
|
||||
<module name="ClassTypeParameterName">
|
||||
<property name="format" value="^[A-Z][a-zA-Z0-9]*$$"/>
|
||||
</module>
|
||||
<module name="MethodTypeParameterName">
|
||||
<property name="format" value="^[A-Z][a-zA-Z0-9]*$$"/>
|
||||
</module>
|
||||
<module name="InterfaceTypeParameterName">
|
||||
<property name="format" value="^[A-Z][a-zA-Z0-9]*$$"/>
|
||||
</module>
|
||||
<module name="PackageName"/>
|
||||
<module name="ParameterName"/>
|
||||
<module name="StaticVariableName"/>
|
||||
<module name="TypeName"/>
|
||||
<module name="AvoidStarImport"/>
|
||||
|
||||
<!-- variables that can be final should be final (suppressed except for Streams) -->
|
||||
<module name="FinalLocalVariable">
|
||||
<property name="tokens" value="VARIABLE_DEF,PARAMETER_DEF"/>
|
||||
<property name="validateEnhancedForLoopVariable" value="true"/>
|
||||
</module>
|
||||
|
||||
<!-- dependencies -->
|
||||
<module name="ImportControl">
|
||||
<property name="file" value="${importControlFile}"/>
|
||||
</module>
|
||||
|
||||
<!-- whitespace -->
|
||||
<module name="GenericWhitespace"/>
|
||||
<module name="NoWhitespaceBefore"/>
|
||||
<module name="WhitespaceAfter" />
|
||||
<module name="NoWhitespaceAfter"/>
|
||||
<module name="WhitespaceAround">
|
||||
<property name="allowEmptyConstructors" value="true"/>
|
||||
<property name="allowEmptyMethods" value="true"/>
|
||||
</module>
|
||||
<module name="Indentation"/>
|
||||
<module name="MethodParamPad"/>
|
||||
<module name="ParenPad"/>
|
||||
<module name="TypecastParenPad"/>
|
||||
|
||||
<!-- locale-sensitive methods should specify locale -->
|
||||
<module name="Regexp">
|
||||
<property name="format" value="\.to(Lower|Upper)Case\(\)"/>
|
||||
<property name="illegalPattern" value="true"/>
|
||||
<property name="ignoreComments" value="true"/>
|
||||
</module>
|
||||
|
||||
<!-- code quality -->
|
||||
<module name="MethodLength"/>
|
||||
<module name="ParameterNumber">
|
||||
<!-- default is 8 -->
|
||||
<property name="max" value="13"/>
|
||||
</module>
|
||||
<module name="ClassDataAbstractionCoupling">
|
||||
<!-- default is 7 -->
|
||||
<property name="max" value="25"/>
|
||||
</module>
|
||||
<module name="BooleanExpressionComplexity">
|
||||
<!-- default is 3 -->
|
||||
<property name="max" value="5"/>
|
||||
</module>
|
||||
|
||||
<module name="ClassFanOutComplexity">
|
||||
<!-- default is 20 -->
|
||||
<property name="max" value="50"/>
|
||||
</module>
|
||||
<module name="CyclomaticComplexity">
|
||||
<!-- default is 10-->
|
||||
<property name="max" value="16"/>
|
||||
</module>
|
||||
<module name="JavaNCSS">
|
||||
<!-- default is 50 -->
|
||||
<property name="methodMaximum" value="100"/>
|
||||
</module>
|
||||
<module name="NPathComplexity">
|
||||
<!-- default is 200 -->
|
||||
<property name="max" value="500"/>
|
||||
</module>
|
||||
</module>
|
||||
|
||||
<module name="SuppressionFilter">
|
||||
<property name="file" value="${suppressionsFile}"/>
|
||||
</module>
|
||||
</module>
|
||||
@@ -1,56 +0,0 @@
|
||||
<!DOCTYPE import-control PUBLIC
|
||||
"-//Puppy Crawl//DTD Import Control 1.1//EN"
|
||||
"http://www.puppycrawl.com/dtds/import_control_1_1.dtd">
|
||||
<!--
|
||||
// Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
// contributor license agreements. See the NOTICE file distributed with
|
||||
// this work for additional information regarding copyright ownership.
|
||||
// The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
// (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
-->
|
||||
|
||||
<import-control pkg="kafka">
|
||||
|
||||
<!-- THINK HARD ABOUT THE LAYERING OF THE PROJECT BEFORE CHANGING THIS FILE -->
|
||||
|
||||
<!-- common library dependencies -->
|
||||
<allow pkg="java" />
|
||||
<allow pkg="scala" />
|
||||
<allow pkg="javax.management" />
|
||||
<allow pkg="org.slf4j" />
|
||||
<allow pkg="org.junit" />
|
||||
<allow pkg="org.easymock" />
|
||||
<allow pkg="java.security" />
|
||||
<allow pkg="javax.net.ssl" />
|
||||
<allow pkg="javax.security" />
|
||||
|
||||
<allow pkg="kafka.common" />
|
||||
<allow pkg="kafka.utils" />
|
||||
<allow pkg="kafka.serializer" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
|
||||
<subpackage name="tools">
|
||||
<allow pkg="org.apache.kafka.clients.admin" />
|
||||
<allow pkg="kafka.admin" />
|
||||
<allow pkg="joptsimple" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="coordinator">
|
||||
<allow class="kafka.server.MetadataCache" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="examples">
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
</subpackage>
|
||||
|
||||
</import-control>
|
||||
@@ -1,47 +0,0 @@
|
||||
<!DOCTYPE import-control PUBLIC
|
||||
"-//Puppy Crawl//DTD Import Control 1.1//EN"
|
||||
"http://www.puppycrawl.com/dtds/import_control_1_1.dtd">
|
||||
<!--
|
||||
// Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
// contributor license agreements. See the NOTICE file distributed with
|
||||
// this work for additional information regarding copyright ownership.
|
||||
// The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
// (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
-->
|
||||
|
||||
<import-control pkg="org.apache.kafka.jmh">
|
||||
|
||||
<allow pkg="java"/>
|
||||
<allow pkg="scala"/>
|
||||
<allow pkg="javax.management"/>
|
||||
<allow pkg="org.slf4j"/>
|
||||
<allow pkg="org.openjdk.jmh.annotations"/>
|
||||
<allow pkg="org.openjdk.jmh.runner"/>
|
||||
<allow pkg="org.openjdk.jmh.infra"/>
|
||||
<allow pkg="java.security"/>
|
||||
<allow pkg="javax.net.ssl"/>
|
||||
<allow pkg="javax.security"/>
|
||||
<allow pkg="org.apache.kafka.common"/>
|
||||
<allow pkg="org.apache.kafka.clients.producer"/>
|
||||
<allow pkg="kafka.cluster"/>
|
||||
<allow pkg="kafka.log"/>
|
||||
<allow pkg="kafka.server"/>
|
||||
<allow pkg="kafka.api"/>
|
||||
<allow class="kafka.utils.Pool"/>
|
||||
<allow class="kafka.utils.KafkaScheduler"/>
|
||||
<allow class="org.apache.kafka.clients.FetchSessionHandler"/>
|
||||
<allow pkg="org.mockito"/>
|
||||
|
||||
|
||||
<subpackage name="cache">
|
||||
</subpackage>
|
||||
</import-control>
|
||||
@@ -1,456 +0,0 @@
|
||||
<!DOCTYPE import-control PUBLIC
|
||||
"-//Puppy Crawl//DTD Import Control 1.1//EN"
|
||||
"http://www.puppycrawl.com/dtds/import_control_1_1.dtd">
|
||||
<!--
|
||||
// Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
// contributor license agreements. See the NOTICE file distributed with
|
||||
// this work for additional information regarding copyright ownership.
|
||||
// The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
// (the "License"); you may not use this file except in compliance with
|
||||
// the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
-->
|
||||
|
||||
<import-control pkg="org.apache.kafka">
|
||||
|
||||
<!-- THINK HARD ABOUT THE LAYERING OF THE PROJECT BEFORE CHANGING THIS FILE -->
|
||||
|
||||
<!-- common library dependencies -->
|
||||
<allow pkg="java" />
|
||||
<allow pkg="javax.management" />
|
||||
<allow pkg="org.slf4j" />
|
||||
<allow pkg="org.junit" />
|
||||
<allow pkg="org.hamcrest" />
|
||||
<allow pkg="org.mockito" />
|
||||
<allow pkg="org.easymock" />
|
||||
<allow pkg="org.powermock" />
|
||||
<allow pkg="java.security" />
|
||||
<allow pkg="javax.net.ssl" />
|
||||
<allow pkg="javax.security" />
|
||||
<allow pkg="org.ietf.jgss" />
|
||||
|
||||
<!-- no one depends on the server -->
|
||||
<disallow pkg="kafka" />
|
||||
|
||||
<!-- anyone can use public classes -->
|
||||
<allow pkg="org.apache.kafka.common" exact-match="true" />
|
||||
<allow pkg="org.apache.kafka.common.security" />
|
||||
<allow pkg="org.apache.kafka.common.serialization" />
|
||||
<allow pkg="org.apache.kafka.common.utils" />
|
||||
<allow pkg="org.apache.kafka.common.errors" exact-match="true" />
|
||||
<allow pkg="org.apache.kafka.common.memory" />
|
||||
|
||||
<subpackage name="common">
|
||||
<disallow pkg="org.apache.kafka.clients" />
|
||||
<allow pkg="org.apache.kafka.common" exact-match="true" />
|
||||
<allow pkg="org.apache.kafka.common.annotation" />
|
||||
<allow pkg="org.apache.kafka.common.config" exact-match="true" />
|
||||
<allow pkg="org.apache.kafka.common.internals" exact-match="true" />
|
||||
<allow pkg="org.apache.kafka.test" />
|
||||
|
||||
<subpackage name="acl">
|
||||
<allow pkg="org.apache.kafka.common.annotation" />
|
||||
<allow pkg="org.apache.kafka.common.acl" />
|
||||
<allow pkg="org.apache.kafka.common.resource" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="config">
|
||||
<allow pkg="org.apache.kafka.common.config" />
|
||||
<!-- for testing -->
|
||||
<allow pkg="org.apache.kafka.common.metrics" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="message">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.protocol.types" />
|
||||
<allow pkg="org.apache.kafka.common.message" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="metrics">
|
||||
<allow pkg="org.apache.kafka.common.metrics" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="memory">
|
||||
<allow pkg="org.apache.kafka.common.metrics" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="network">
|
||||
<allow pkg="org.apache.kafka.common.security.auth" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.config" />
|
||||
<allow pkg="org.apache.kafka.common.metrics" />
|
||||
<allow pkg="org.apache.kafka.common.security" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="resource">
|
||||
<allow pkg="org.apache.kafka.common.annotation" />
|
||||
<allow pkg="org.apache.kafka.common.resource" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="security">
|
||||
<allow pkg="org.apache.kafka.common.annotation" />
|
||||
<allow pkg="org.apache.kafka.common.network" />
|
||||
<allow pkg="org.apache.kafka.common.config" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.errors" />
|
||||
<subpackage name="authenticator">
|
||||
<allow pkg="org.apache.kafka.common.message" />
|
||||
<allow pkg="org.apache.kafka.common.protocol.types" />
|
||||
<allow pkg="org.apache.kafka.common.requests" />
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
</subpackage>
|
||||
<subpackage name="scram">
|
||||
<allow pkg="javax.crypto" />
|
||||
</subpackage>
|
||||
<subpackage name="oauthbearer">
|
||||
<allow pkg="com.fasterxml.jackson.databind" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="protocol">
|
||||
<allow pkg="org.apache.kafka.common.errors" />
|
||||
<allow pkg="org.apache.kafka.common.message" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.protocol.types" />
|
||||
<allow pkg="org.apache.kafka.common.record" />
|
||||
<allow pkg="org.apache.kafka.common.requests" />
|
||||
<allow pkg="org.apache.kafka.common.resource" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="record">
|
||||
<allow pkg="net.jpountz" />
|
||||
<allow pkg="org.apache.kafka.common.header" />
|
||||
<allow pkg="org.apache.kafka.common.record" />
|
||||
<allow pkg="org.apache.kafka.common.network" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.protocol.types" />
|
||||
<allow pkg="org.apache.kafka.common.errors" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="header">
|
||||
<allow pkg="org.apache.kafka.common.header" />
|
||||
<allow pkg="org.apache.kafka.common.record" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="requests">
|
||||
<allow pkg="org.apache.kafka.common.acl" />
|
||||
<allow pkg="org.apache.kafka.common.protocol" />
|
||||
<allow pkg="org.apache.kafka.common.message" />
|
||||
<allow pkg="org.apache.kafka.common.network" />
|
||||
<allow pkg="org.apache.kafka.common.requests" />
|
||||
<allow pkg="org.apache.kafka.common.resource" />
|
||||
<allow pkg="org.apache.kafka.common.record" />
|
||||
<!-- for AuthorizableRequestContext interface -->
|
||||
<allow pkg="org.apache.kafka.server.authorizer" />
|
||||
<!-- for testing -->
|
||||
<allow pkg="org.apache.kafka.common.errors" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="serialization">
|
||||
<allow class="org.apache.kafka.common.errors.SerializationException" />
|
||||
<allow class="org.apache.kafka.common.header.Headers" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="utils">
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="clients">
|
||||
<allow pkg="org.slf4j" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.clients" exact-match="true"/>
|
||||
<allow pkg="org.apache.kafka.test" />
|
||||
|
||||
<subpackage name="consumer">
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="producer">
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
<allow pkg="org.apache.kafka.clients.producer" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="admin">
|
||||
<allow pkg="org.apache.kafka.clients.admin" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer.internals" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="server">
|
||||
<allow pkg="org.slf4j" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.test" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="tools">
|
||||
<allow pkg="org.apache.kafka.common"/>
|
||||
<allow pkg="org.apache.kafka.clients.admin" />
|
||||
<allow pkg="org.apache.kafka.clients.producer" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="net.sourceforge.argparse4j" />
|
||||
<allow pkg="org.apache.log4j" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="trogdor">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="javax.servlet" />
|
||||
<allow pkg="javax.ws.rs" />
|
||||
<allow pkg="net.sourceforge.argparse4j" />
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
<allow pkg="org.apache.kafka.clients.admin" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
|
||||
<allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.test"/>
|
||||
<allow pkg="org.apache.kafka.trogdor" />
|
||||
<allow pkg="org.apache.log4j" />
|
||||
<allow pkg="org.eclipse.jetty" />
|
||||
<allow pkg="org.glassfish.jersey" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="message">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="com.fasterxml.jackson.annotation" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="streams">
|
||||
<allow pkg="org.apache.kafka.common"/>
|
||||
<allow pkg="org.apache.kafka.test"/>
|
||||
<allow pkg="org.apache.kafka.clients"/>
|
||||
<allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
|
||||
<allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
|
||||
|
||||
<allow pkg="org.apache.kafka.streams"/>
|
||||
|
||||
<subpackage name="examples">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="org.apache.kafka.connect.json" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="perf">
|
||||
<allow pkg="com.fasterxml.jackson.databind" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="integration">
|
||||
<allow pkg="kafka.admin" />
|
||||
<allow pkg="kafka.api" />
|
||||
<allow pkg="kafka.server" />
|
||||
<allow pkg="kafka.tools" />
|
||||
<allow pkg="kafka.utils" />
|
||||
<allow pkg="kafka.log" />
|
||||
<allow pkg="scala" />
|
||||
<allow class="kafka.zk.EmbeddedZookeeper"/>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="test">
|
||||
<allow pkg="kafka.admin" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="tools">
|
||||
<allow pkg="kafka.tools" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="state">
|
||||
<allow pkg="org.rocksdb" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="processor">
|
||||
<subpackage name="internals">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="kafka.utils" />
|
||||
<allow pkg="org.apache.zookeeper" />
|
||||
<allow pkg="org.apache.zookeeper" />
|
||||
<allow pkg="org.apache.log4j" />
|
||||
<subpackage name="testutil">
|
||||
<allow pkg="org.apache.log4j" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="jmh">
|
||||
<allow pkg="org.openjdk.jmh.annotations" />
|
||||
<allow pkg="org.openjdk.jmh.runner" />
|
||||
<allow pkg="org.openjdk.jmh.runner.options" />
|
||||
<allow pkg="org.openjdk.jmh.infra" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
<allow pkg="org.apache.kafka.streams" />
|
||||
<allow pkg="org.github.jamm" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="log4jappender">
|
||||
<allow pkg="org.apache.log4j" />
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.test" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="test">
|
||||
<allow pkg="org.apache.kafka" />
|
||||
<allow pkg="org.bouncycastle" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="connect">
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.connect.data" />
|
||||
<allow pkg="org.apache.kafka.connect.errors" />
|
||||
<allow pkg="org.apache.kafka.connect.header" />
|
||||
<allow pkg="org.apache.kafka.connect.components"/>
|
||||
<allow pkg="org.apache.kafka.clients" />
|
||||
<allow pkg="org.apache.kafka.test"/>
|
||||
|
||||
<subpackage name="source">
|
||||
<allow pkg="org.apache.kafka.connect.connector" />
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="sink">
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
<allow pkg="org.apache.kafka.connect.connector" />
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="converters">
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="connector.policy">
|
||||
<allow pkg="org.apache.kafka.connect.health" />
|
||||
<allow pkg="org.apache.kafka.connect.connector" />
|
||||
<!-- for testing -->
|
||||
<allow pkg="org.apache.kafka.connect.runtime" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="rest">
|
||||
<allow pkg="org.apache.kafka.connect.health" />
|
||||
<allow pkg="javax.ws.rs" />
|
||||
<allow pkg= "javax.security.auth"/>
|
||||
<subpackage name="basic">
|
||||
<allow pkg="org.apache.kafka.connect.rest"/>
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="mirror">
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
<allow pkg="org.apache.kafka.connect.source" />
|
||||
<allow pkg="org.apache.kafka.connect.sink" />
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
<allow pkg="org.apache.kafka.connect.connector" />
|
||||
<allow pkg="org.apache.kafka.connect.runtime" />
|
||||
<allow pkg="org.apache.kafka.connect.runtime.distributed" />
|
||||
<allow pkg="org.apache.kafka.connect.util" />
|
||||
<allow pkg="org.apache.kafka.connect.converters" />
|
||||
<allow pkg="net.sourceforge.argparse4j" />
|
||||
<!-- for tests -->
|
||||
<allow pkg="org.apache.kafka.connect.integration" />
|
||||
<allow pkg="org.apache.kafka.connect.mirror" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="runtime">
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.reflections"/>
|
||||
<allow pkg="org.reflections.util"/>
|
||||
<allow pkg="javax.crypto"/>
|
||||
|
||||
<subpackage name="rest">
|
||||
<allow pkg="org.eclipse.jetty" />
|
||||
<allow pkg="javax.ws.rs" />
|
||||
<allow pkg="javax.servlet" />
|
||||
<allow pkg="org.glassfish.jersey" />
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="org.apache.http"/>
|
||||
<subpackage name="resources">
|
||||
<allow pkg="org.apache.log4j" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="isolation">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="org.apache.maven.artifact.versioning" />
|
||||
<allow pkg="javax.tools" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="distributed">
|
||||
<allow pkg="javax.ws.rs.core" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="cli">
|
||||
<allow pkg="org.apache.kafka.connect.runtime" />
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
<allow pkg="org.apache.kafka.connect.util" />
|
||||
<allow pkg="org.apache.kafka.common" />
|
||||
<allow pkg="org.apache.kafka.connect.connector.policy" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="storage">
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.apache.kafka.common.serialization" />
|
||||
<allow pkg="javax.crypto.spec"/>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="util">
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.reflections.vfs" />
|
||||
<!-- for annotations to avoid code duplication -->
|
||||
<allow pkg="com.fasterxml.jackson.annotation" />
|
||||
<allow pkg="com.fasterxml.jackson.databind" />
|
||||
<subpackage name="clusters">
|
||||
<allow pkg="kafka.server" />
|
||||
<allow pkg="kafka.zk" />
|
||||
<allow pkg="kafka.utils" />
|
||||
<allow class="javax.servlet.http.HttpServletResponse" />
|
||||
<allow class="javax.ws.rs.core.Response" />
|
||||
<allow pkg="com.fasterxml.jackson.core.type" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="integration">
|
||||
<allow pkg="org.apache.kafka.connect.util.clusters" />
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.apache.kafka.tools" />
|
||||
<allow pkg="javax.ws.rs" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="json">
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
<allow pkg="org.apache.kafka.common.serialization" />
|
||||
<allow pkg="org.apache.kafka.common.errors" />
|
||||
<allow pkg="org.apache.kafka.connect.storage" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="file">
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.apache.kafka.clients.consumer" />
|
||||
<!-- for tests -->
|
||||
<allow pkg="org.easymock" />
|
||||
<allow pkg="org.powermock" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="tools">
|
||||
<allow pkg="org.apache.kafka.connect" />
|
||||
<allow pkg="org.apache.kafka.tools" />
|
||||
<allow pkg="com.fasterxml.jackson" />
|
||||
</subpackage>
|
||||
|
||||
<subpackage name="transforms">
|
||||
<allow class="org.apache.kafka.connect.connector.ConnectRecord" />
|
||||
<allow class="org.apache.kafka.connect.source.SourceRecord" />
|
||||
<allow class="org.apache.kafka.connect.sink.SinkRecord" />
|
||||
<allow pkg="org.apache.kafka.connect.transforms.util" />
|
||||
</subpackage>
|
||||
</subpackage>
|
||||
|
||||
</import-control>
|
||||
@@ -1,16 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
@@ -1,267 +0,0 @@
|
||||
|
||||
|
||||
<!DOCTYPE suppressions PUBLIC
|
||||
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
|
||||
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
|
||||
|
||||
<suppressions>
|
||||
|
||||
<!-- Note that [/\\] must be used as the path separator for cross-platform support -->
|
||||
|
||||
<!-- Generator -->
|
||||
<suppress checks="CyclomaticComplexity|BooleanExpressionComplexity"
|
||||
files="(SchemaGenerator|MessageDataGenerator|FieldSpec).java"/>
|
||||
<suppress checks="NPathComplexity"
|
||||
files="(MessageDataGenerator|FieldSpec).java"/>
|
||||
<suppress checks="JavaNCSS"
|
||||
files="(ApiMessageType).java|MessageDataGenerator.java"/>
|
||||
<suppress checks="MethodLength"
|
||||
files="MessageDataGenerator.java"/>
|
||||
|
||||
<!-- Clients -->
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(Fetcher|Sender|SenderTest|ConsumerCoordinator|KafkaConsumer|KafkaProducer|Utils|TransactionManager|TransactionManagerTest|KafkaAdminClient|NetworkClient|Admin).java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(SaslServerAuthenticator|SaslAuthenticatorTest).java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="Errors.java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="Utils.java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="AbstractRequest.java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="AbstractResponse.java"/>
|
||||
|
||||
<suppress checks="MethodLength"
|
||||
files="KerberosLogin.java|RequestResponseTest.java|ConnectMetricsRegistry.java|KafkaConsumer.java"/>
|
||||
|
||||
<suppress checks="ParameterNumber"
|
||||
files="NetworkClient.java|FieldSpec.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="KafkaConsumer.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="Fetcher.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="Sender.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="ConfigDef.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="DefaultRecordBatch.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="Sender.java"/>
|
||||
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(KafkaConsumer|ConsumerCoordinator|Fetcher|KafkaProducer|AbstractRequest|AbstractResponse|TransactionManager|Admin|KafkaAdminClient).java"/>
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(Errors|SaslAuthenticatorTest|AgentTest|CoordinatorTest).java"/>
|
||||
|
||||
<suppress checks="BooleanExpressionComplexity"
|
||||
files="(Utils|Topic|KafkaLZ4BlockOutputStream|AclData|JoinGroupRequest).java"/>
|
||||
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="(ConsumerCoordinator|Fetcher|Sender|KafkaProducer|BufferPool|ConfigDef|RecordAccumulator|KerberosLogin|AbstractRequest|AbstractResponse|Selector|SslFactory|SslTransportLayer|SaslClientAuthenticator|SaslClientCallbackHandler|SaslServerAuthenticator|AbstractCoordinator|TransactionManager).java"/>
|
||||
|
||||
<suppress checks="JavaNCSS"
|
||||
files="(AbstractRequest|KerberosLogin|WorkerSinkTaskTest|TransactionManagerTest|SenderTest|KafkaAdminClient|ConsumerCoordinatorTest).java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="(BufferPool|Fetcher|MetricName|Node|ConfigDef|RecordBatch|SslFactory|SslTransportLayer|MetadataResponse|KerberosLogin|Selector|Sender|Serdes|TokenInformation|Agent|Values|PluginUtils|MiniTrogdorCluster|TasksRequest|KafkaProducer).java"/>
|
||||
|
||||
<suppress checks="(JavaNCSS|CyclomaticComplexity|MethodLength)"
|
||||
files="CoordinatorClient.java"/>
|
||||
<suppress checks="(UnnecessaryParentheses|BooleanExpressionComplexity|CyclomaticComplexity|WhitespaceAfter|LocalVariableName)"
|
||||
files="Murmur3.java"/>
|
||||
|
||||
<suppress checks="(NPathComplexity|ClassFanOutComplexity|CyclomaticComplexity|ClassDataAbstractionCoupling|LocalVariableName|MemberName|ParameterName|MethodLength|JavaNCSS)"
|
||||
files="clients[\\/]src[\\/](generated|generated-test)[\\/].+.java$"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="MessageTest.java"/>
|
||||
|
||||
<!-- clients tests -->
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(Sender|Fetcher|KafkaConsumer|Metrics|RequestResponse|TransactionManager|KafkaAdminClient|Message|KafkaProducer)Test.java"/>
|
||||
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(ConsumerCoordinator|KafkaConsumer|RequestResponse|Fetcher|KafkaAdminClient|Message|KafkaProducer)Test.java"/>
|
||||
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="MockAdminClient.java"/>
|
||||
|
||||
<suppress checks="JavaNCSS"
|
||||
files="RequestResponseTest.java|FetcherTest.java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="MemoryRecordsTest|MetricsTest"/>
|
||||
|
||||
<suppress checks="(WhitespaceAround|LocalVariableName|ImportControl|AvoidStarImport)"
|
||||
files="Murmur3Test.java"/>
|
||||
|
||||
<!-- Connect -->
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="DistributedHerder(|Test).java"/>
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="Worker.java"/>
|
||||
<suppress checks="MethodLength"
|
||||
files="(KafkaConfigBackingStore|RequestResponseTest|WorkerSinkTaskTest).java"/>
|
||||
|
||||
<suppress checks="ParameterNumber"
|
||||
files="(WorkerSinkTask|WorkerSourceTask).java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="WorkerCoordinator.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="ConfigKeyInfo.java"/>
|
||||
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(RestServer|AbstractHerder|DistributedHerder).java"/>
|
||||
|
||||
<suppress checks="BooleanExpressionComplexity"
|
||||
files="JsonConverter.java"/>
|
||||
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="ConnectRecord.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="JsonConverter.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="FileStreamSourceTask.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="DistributedHerder.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="KafkaConfigBackingStore.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="(Values|ConnectHeader|ConnectHeaders).java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="RocksDBGenericOptionsToDbOptionsColumnFamilyOptionsAdapterTest.java"/>
|
||||
|
||||
<suppress checks="JavaNCSS"
|
||||
files="KafkaConfigBackingStore.java"/>
|
||||
<suppress checks="JavaNCSS"
|
||||
files="Values.java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="(DistributedHerder|RestClient|JsonConverter|KafkaConfigBackingStore|FileStreamSourceTask).java"/>
|
||||
|
||||
<suppress checks="MethodLength"
|
||||
files="Values.java"/>
|
||||
|
||||
<!-- connect tests-->
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(DistributedHerder|KafkaBasedLog)Test.java"/>
|
||||
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(WorkerSinkTask|WorkerSourceTask)Test.java"/>
|
||||
|
||||
<!-- Streams -->
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(TopologyBuilder|KafkaStreams|KStreamImpl|KTableImpl|StreamThread|StreamTask).java"/>
|
||||
|
||||
<suppress checks="MethodLength"
|
||||
files="(KTableImpl|StreamsPartitionAssignor.java)"/>
|
||||
|
||||
<suppress checks="ParameterNumber"
|
||||
files="StreamTask.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="RocksDBWindowStoreSupplier.java"/>
|
||||
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="(TopologyBuilder|KStreamImpl|StreamsPartitionAssignor|KafkaStreams|KTableImpl).java"/>
|
||||
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="TopologyBuilder.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="StreamsPartitionAssignor.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="StreamThread.java"/>
|
||||
|
||||
<suppress checks="JavaNCSS"
|
||||
files="StreamsPartitionAssignor.java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="(ProcessorStateManager|InternalTopologyBuilder|StreamsPartitionAssignor|StreamThread).java"/>
|
||||
|
||||
<suppress checks="(FinalLocalVariable|UnnecessaryParentheses|BooleanExpressionComplexity|CyclomaticComplexity|WhitespaceAfter|LocalVariableName)"
|
||||
files="Murmur3.java"/>
|
||||
|
||||
<!-- suppress FinalLocalVariable outside of the streams package. -->
|
||||
<suppress checks="FinalLocalVariable"
|
||||
files="^(?!.*[\\/]org[\\/]apache[\\/]kafka[\\/]streams[\\/].*$)"/>
|
||||
|
||||
<!-- generated code -->
|
||||
<suppress checks="(NPathComplexity|ClassFanOutComplexity|CyclomaticComplexity|ClassDataAbstractionCoupling|FinalLocalVariable|LocalVariableName|MemberName|ParameterName|MethodLength|JavaNCSS)"
|
||||
files="streams[\\/]src[\\/](generated|generated-test)[\\/].+.java$"/>
|
||||
|
||||
|
||||
<!-- Streams tests -->
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="(StreamThreadTest|StreamTaskTest|ProcessorTopologyTestDriver).java"/>
|
||||
|
||||
<suppress checks="MethodLength"
|
||||
files="KStreamKTableJoinIntegrationTest.java"/>
|
||||
<suppress checks="MethodLength"
|
||||
files="KStreamKStreamJoinTest.java"/>
|
||||
<suppress checks="MethodLength"
|
||||
files="KStreamWindowAggregateTest.java"/>
|
||||
<suppress checks="MethodLength"
|
||||
files="RocksDBWindowStoreTest.java"/>
|
||||
|
||||
<suppress checks="MemberName"
|
||||
files="StreamsPartitionAssignorTest.java"/>
|
||||
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files=".*[/\\]streams[/\\].*test[/\\].*.java"/>
|
||||
|
||||
<suppress checks="BooleanExpressionComplexity"
|
||||
files="SmokeTestDriver.java"/>
|
||||
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="KStreamKStreamJoinTest.java|KTableKTableForeignKeyJoinIntegrationTest.java"/>
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="RelationalSmokeTest.java|SmokeTestDriver.java"/>
|
||||
|
||||
<suppress checks="JavaNCSS"
|
||||
files="KStreamKStreamJoinTest.java"/>
|
||||
<suppress checks="JavaNCSS"
|
||||
files="SmokeTestDriver.java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="EosTestDriver|KStreamKStreamJoinTest.java|RelationalSmokeTest.java|SmokeTestDriver.java|KStreamKStreamLeftJoinTest.java|KTableKTableForeignKeyJoinIntegrationTest.java"/>
|
||||
|
||||
<suppress checks="(FinalLocalVariable|WhitespaceAround|LocalVariableName|ImportControl|AvoidStarImport)"
|
||||
files="Murmur3Test.java"/>
|
||||
|
||||
|
||||
<!-- Streams Test-Utils -->
|
||||
<suppress checks="ClassFanOutComplexity"
|
||||
files="TopologyTestDriver.java"/>
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="TopologyTestDriver.java"/>
|
||||
|
||||
<!-- Tools -->
|
||||
<suppress checks="ClassDataAbstractionCoupling"
|
||||
files="VerifiableConsumer.java"/>
|
||||
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="(StreamsResetter|ProducerPerformance|Agent).java"/>
|
||||
<suppress checks="BooleanExpressionComplexity"
|
||||
files="StreamsResetter.java"/>
|
||||
<suppress checks="NPathComplexity"
|
||||
files="(ProducerPerformance|StreamsResetter|Agent|TransactionalMessageCopier).java"/>
|
||||
<suppress checks="ImportControl"
|
||||
files="SignalLogger.java"/>
|
||||
<suppress checks="IllegalImport"
|
||||
files="SignalLogger.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="ProduceBenchSpec.java"/>
|
||||
<suppress checks="ParameterNumber"
|
||||
files="SustainedConnectionSpec.java"/>
|
||||
|
||||
<!-- Log4J-Appender -->
|
||||
<suppress checks="CyclomaticComplexity"
|
||||
files="KafkaLog4jAppender.java"/>
|
||||
|
||||
<suppress checks="NPathComplexity"
|
||||
files="KafkaLog4jAppender.java"/>
|
||||
<suppress checks="JavaNCSS"
|
||||
files="RequestResponseTest.java"/>
|
||||
|
||||
</suppressions>
|
||||
1
clients/.gitignore
vendored
1
clients/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
/bin/
|
||||
@@ -1,952 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// THIS CODE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
|
||||
|
||||
package org.apache.kafka.common.message;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.Objects;
|
||||
import java.util.TreeMap;
|
||||
import java.util.UUID;
|
||||
import org.apache.kafka.common.errors.UnsupportedVersionException;
|
||||
import org.apache.kafka.common.protocol.ApiMessage;
|
||||
import org.apache.kafka.common.protocol.MessageUtil;
|
||||
import org.apache.kafka.common.protocol.ObjectSerializationCache;
|
||||
import org.apache.kafka.common.protocol.Readable;
|
||||
import org.apache.kafka.common.protocol.Writable;
|
||||
import org.apache.kafka.common.protocol.types.CompactArrayOf;
|
||||
import org.apache.kafka.common.protocol.types.Field;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedField;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedFieldWriter;
|
||||
import org.apache.kafka.common.protocol.types.Schema;
|
||||
import org.apache.kafka.common.protocol.types.Struct;
|
||||
import org.apache.kafka.common.protocol.types.Type;
|
||||
import org.apache.kafka.common.utils.ByteUtils;
|
||||
import org.apache.kafka.common.utils.Bytes;
|
||||
|
||||
import static java.util.Map.Entry;
|
||||
import static org.apache.kafka.common.protocol.types.Field.TaggedFieldsSection;
|
||||
|
||||
|
||||
public class SimpleExampleMessageData implements ApiMessage {
|
||||
private UUID processId;
|
||||
private List<Integer> myTaggedIntArray;
|
||||
private String myNullableString;
|
||||
private short myInt16;
|
||||
private double myFloat64;
|
||||
private String myString;
|
||||
private byte[] myBytes;
|
||||
private UUID taggedUuid;
|
||||
private long taggedLong;
|
||||
private ByteBuffer zeroCopyByteBuffer;
|
||||
private ByteBuffer nullableZeroCopyByteBuffer;
|
||||
private List<RawTaggedField> _unknownTaggedFields;
|
||||
|
||||
public static final Schema SCHEMA_0 =
|
||||
new Schema(
|
||||
);
|
||||
|
||||
public static final Schema SCHEMA_1 =
|
||||
new Schema(
|
||||
new Field("process_id", Type.UUID, ""),
|
||||
new Field("zero_copy_byte_buffer", Type.COMPACT_BYTES, ""),
|
||||
new Field("nullable_zero_copy_byte_buffer", Type.COMPACT_NULLABLE_BYTES, ""),
|
||||
TaggedFieldsSection.of(
|
||||
0, new Field("my_tagged_int_array", new CompactArrayOf(Type.INT32), ""),
|
||||
1, new Field("my_nullable_string", Type.COMPACT_NULLABLE_STRING, ""),
|
||||
2, new Field("my_int16", Type.INT16, ""),
|
||||
3, new Field("my_float64", Type.FLOAT64, ""),
|
||||
4, new Field("my_string", Type.COMPACT_STRING, ""),
|
||||
5, new Field("my_bytes", Type.COMPACT_NULLABLE_BYTES, ""),
|
||||
6, new Field("tagged_uuid", Type.UUID, ""),
|
||||
7, new Field("tagged_long", Type.INT64, "")
|
||||
)
|
||||
);
|
||||
|
||||
public static final Schema[] SCHEMAS = new Schema[] {
|
||||
SCHEMA_0,
|
||||
SCHEMA_1
|
||||
};
|
||||
|
||||
public SimpleExampleMessageData(Readable _readable, short _version) {
|
||||
read(_readable, _version);
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData(Struct struct, short _version) {
|
||||
fromStruct(struct, _version);
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData() {
|
||||
this.processId = MessageUtil.ZERO_UUID;
|
||||
this.myTaggedIntArray = new ArrayList<Integer>();
|
||||
this.myNullableString = null;
|
||||
this.myInt16 = (short) 123;
|
||||
this.myFloat64 = Double.parseDouble("12.34");
|
||||
this.myString = "";
|
||||
this.myBytes = Bytes.EMPTY;
|
||||
this.taggedUuid = UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367");
|
||||
this.taggedLong = 0xcafcacafcacafcaL;
|
||||
this.zeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
this.nullableZeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short apiKey() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short lowestSupportedVersion() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short highestSupportedVersion() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void read(Readable _readable, short _version) {
|
||||
if (_version >= 1) {
|
||||
this.processId = _readable.readUUID();
|
||||
} else {
|
||||
this.processId = MessageUtil.ZERO_UUID;
|
||||
}
|
||||
{
|
||||
this.myTaggedIntArray = new ArrayList<Integer>();
|
||||
}
|
||||
{
|
||||
this.myNullableString = null;
|
||||
}
|
||||
this.myInt16 = (short) 123;
|
||||
this.myFloat64 = Double.parseDouble("12.34");
|
||||
{
|
||||
this.myString = "";
|
||||
}
|
||||
{
|
||||
this.myBytes = Bytes.EMPTY;
|
||||
}
|
||||
this.taggedUuid = UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367");
|
||||
this.taggedLong = 0xcafcacafcacafcaL;
|
||||
if (_version >= 1) {
|
||||
int length;
|
||||
length = _readable.readUnsignedVarint() - 1;
|
||||
if (length < 0) {
|
||||
throw new RuntimeException("non-nullable field zeroCopyByteBuffer was serialized as null");
|
||||
} else {
|
||||
this.zeroCopyByteBuffer = _readable.readByteBuffer(length);
|
||||
}
|
||||
} else {
|
||||
this.zeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
int length;
|
||||
length = _readable.readUnsignedVarint() - 1;
|
||||
if (length < 0) {
|
||||
this.nullableZeroCopyByteBuffer = null;
|
||||
} else {
|
||||
this.nullableZeroCopyByteBuffer = _readable.readByteBuffer(length);
|
||||
}
|
||||
} else {
|
||||
this.nullableZeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
}
|
||||
this._unknownTaggedFields = null;
|
||||
if (_version >= 1) {
|
||||
int _numTaggedFields = _readable.readUnsignedVarint();
|
||||
for (int _i = 0; _i < _numTaggedFields; _i++) {
|
||||
int _tag = _readable.readUnsignedVarint();
|
||||
int _size = _readable.readUnsignedVarint();
|
||||
switch (_tag) {
|
||||
case 0: {
|
||||
int arrayLength;
|
||||
arrayLength = _readable.readUnsignedVarint() - 1;
|
||||
if (arrayLength < 0) {
|
||||
throw new RuntimeException("non-nullable field myTaggedIntArray was serialized as null");
|
||||
} else {
|
||||
ArrayList<Integer> newCollection = new ArrayList<Integer>(arrayLength);
|
||||
for (int i = 0; i < arrayLength; i++) {
|
||||
newCollection.add(_readable.readInt());
|
||||
}
|
||||
this.myTaggedIntArray = newCollection;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 1: {
|
||||
int length;
|
||||
length = _readable.readUnsignedVarint() - 1;
|
||||
if (length < 0) {
|
||||
this.myNullableString = null;
|
||||
} else if (length > 0x7fff) {
|
||||
throw new RuntimeException("string field myNullableString had invalid length " + length);
|
||||
} else {
|
||||
this.myNullableString = _readable.readString(length);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 2: {
|
||||
this.myInt16 = _readable.readShort();
|
||||
break;
|
||||
}
|
||||
case 3: {
|
||||
this.myFloat64 = _readable.readDouble();
|
||||
break;
|
||||
}
|
||||
case 4: {
|
||||
int length;
|
||||
length = _readable.readUnsignedVarint() - 1;
|
||||
if (length < 0) {
|
||||
throw new RuntimeException("non-nullable field myString was serialized as null");
|
||||
} else if (length > 0x7fff) {
|
||||
throw new RuntimeException("string field myString had invalid length " + length);
|
||||
} else {
|
||||
this.myString = _readable.readString(length);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 5: {
|
||||
int length;
|
||||
length = _readable.readUnsignedVarint() - 1;
|
||||
if (length < 0) {
|
||||
this.myBytes = null;
|
||||
} else {
|
||||
byte[] newBytes = new byte[length];
|
||||
_readable.readArray(newBytes);
|
||||
this.myBytes = newBytes;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 6: {
|
||||
this.taggedUuid = _readable.readUUID();
|
||||
break;
|
||||
}
|
||||
case 7: {
|
||||
this.taggedLong = _readable.readLong();
|
||||
break;
|
||||
}
|
||||
default:
|
||||
this._unknownTaggedFields = _readable.readUnknownTaggedField(this._unknownTaggedFields, _tag, _size);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Writable _writable, ObjectSerializationCache _cache, short _version) {
|
||||
int _numTaggedFields = 0;
|
||||
if (_version >= 1) {
|
||||
_writable.writeUUID(processId);
|
||||
} else {
|
||||
if (processId != MessageUtil.ZERO_UUID) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default processId at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myTaggedIntArray at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myNullableString != null) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (myNullableString != null) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myNullableString at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myInt16 != (short) 123) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (myInt16 != (short) 123) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myInt16 at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myFloat64 at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (!myString.equals("")) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (!myString.equals("")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myString at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myBytes == null || myBytes.length != 0) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (myBytes == null || myBytes.length != 0) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myBytes at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default taggedUuid at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
_numTaggedFields++;
|
||||
}
|
||||
} else {
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default taggedLong at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
_writable.writeUnsignedVarint(zeroCopyByteBuffer.remaining() + 1);
|
||||
_writable.writeByteBuffer(zeroCopyByteBuffer);
|
||||
} else {
|
||||
if (zeroCopyByteBuffer.hasRemaining()) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default zeroCopyByteBuffer at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (nullableZeroCopyByteBuffer == null) {
|
||||
_writable.writeUnsignedVarint(0);
|
||||
} else {
|
||||
_writable.writeUnsignedVarint(nullableZeroCopyByteBuffer.remaining() + 1);
|
||||
_writable.writeByteBuffer(nullableZeroCopyByteBuffer);
|
||||
}
|
||||
} else {
|
||||
if (nullableZeroCopyByteBuffer == null || nullableZeroCopyByteBuffer.remaining() > 0) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default nullableZeroCopyByteBuffer at version " + _version);
|
||||
}
|
||||
}
|
||||
RawTaggedFieldWriter _rawWriter = RawTaggedFieldWriter.forFields(_unknownTaggedFields);
|
||||
_numTaggedFields += _rawWriter.numFields();
|
||||
if (_version >= 1) {
|
||||
_writable.writeUnsignedVarint(_numTaggedFields);
|
||||
{
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
_writable.writeUnsignedVarint(0);
|
||||
_writable.writeUnsignedVarint(_cache.getArraySizeInBytes(this.myTaggedIntArray));
|
||||
_writable.writeUnsignedVarint(myTaggedIntArray.size() + 1);
|
||||
for (Integer myTaggedIntArrayElement : myTaggedIntArray) {
|
||||
_writable.writeInt(myTaggedIntArrayElement);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (myNullableString != null) {
|
||||
_writable.writeUnsignedVarint(1);
|
||||
byte[] _stringBytes = _cache.getSerializedValue(this.myNullableString);
|
||||
_writable.writeUnsignedVarint(_stringBytes.length + ByteUtils.sizeOfUnsignedVarint(_stringBytes.length + 1));
|
||||
_writable.writeUnsignedVarint(_stringBytes.length + 1);
|
||||
_writable.writeByteArray(_stringBytes);
|
||||
}
|
||||
{
|
||||
if (myInt16 != (short) 123) {
|
||||
_writable.writeUnsignedVarint(2);
|
||||
_writable.writeUnsignedVarint(2);
|
||||
_writable.writeShort(myInt16);
|
||||
}
|
||||
}
|
||||
{
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
_writable.writeUnsignedVarint(3);
|
||||
_writable.writeUnsignedVarint(8);
|
||||
_writable.writeDouble(myFloat64);
|
||||
}
|
||||
}
|
||||
{
|
||||
if (!myString.equals("")) {
|
||||
_writable.writeUnsignedVarint(4);
|
||||
byte[] _stringBytes = _cache.getSerializedValue(this.myString);
|
||||
_writable.writeUnsignedVarint(_stringBytes.length + ByteUtils.sizeOfUnsignedVarint(_stringBytes.length + 1));
|
||||
_writable.writeUnsignedVarint(_stringBytes.length + 1);
|
||||
_writable.writeByteArray(_stringBytes);
|
||||
}
|
||||
}
|
||||
if (myBytes == null) {
|
||||
_writable.writeUnsignedVarint(5);
|
||||
_writable.writeUnsignedVarint(1);
|
||||
_writable.writeUnsignedVarint(0);
|
||||
} else {
|
||||
if (myBytes.length != 0) {
|
||||
_writable.writeUnsignedVarint(5);
|
||||
_writable.writeUnsignedVarint(this.myBytes.length + ByteUtils.sizeOfUnsignedVarint(this.myBytes.length + 1));
|
||||
_writable.writeUnsignedVarint(this.myBytes.length + 1);
|
||||
_writable.writeByteArray(this.myBytes);
|
||||
}
|
||||
}
|
||||
{
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
_writable.writeUnsignedVarint(6);
|
||||
_writable.writeUnsignedVarint(16);
|
||||
_writable.writeUUID(taggedUuid);
|
||||
}
|
||||
}
|
||||
{
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
_writable.writeUnsignedVarint(7);
|
||||
_writable.writeUnsignedVarint(8);
|
||||
_writable.writeLong(taggedLong);
|
||||
}
|
||||
}
|
||||
_rawWriter.writeRawTags(_writable, Integer.MAX_VALUE);
|
||||
} else {
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public void fromStruct(Struct struct, short _version) {
|
||||
NavigableMap<Integer, Object> _taggedFields = null;
|
||||
this._unknownTaggedFields = null;
|
||||
if (_version >= 1) {
|
||||
_taggedFields = (NavigableMap<Integer, Object>) struct.get("_tagged_fields");
|
||||
}
|
||||
if (_version >= 1) {
|
||||
this.processId = struct.getUUID("process_id");
|
||||
} else {
|
||||
this.processId = MessageUtil.ZERO_UUID;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(0)) {
|
||||
Object[] _nestedObjects = (Object[]) _taggedFields.remove(0);
|
||||
this.myTaggedIntArray = new ArrayList<Integer>(_nestedObjects.length);
|
||||
for (Object nestedObject : _nestedObjects) {
|
||||
this.myTaggedIntArray.add((Integer) nestedObject);
|
||||
}
|
||||
} else {
|
||||
this.myTaggedIntArray = new ArrayList<Integer>();
|
||||
}
|
||||
} else {
|
||||
this.myTaggedIntArray = new ArrayList<Integer>();
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(1)) {
|
||||
this.myNullableString = (String) _taggedFields.remove(1);
|
||||
} else {
|
||||
this.myNullableString = null;
|
||||
}
|
||||
} else {
|
||||
this.myNullableString = null;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(2)) {
|
||||
this.myInt16 = (Short) _taggedFields.remove(2);
|
||||
} else {
|
||||
this.myInt16 = (short) 123;
|
||||
}
|
||||
} else {
|
||||
this.myInt16 = (short) 123;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(3)) {
|
||||
this.myFloat64 = (Double) _taggedFields.remove(3);
|
||||
} else {
|
||||
this.myFloat64 = Double.parseDouble("12.34");
|
||||
}
|
||||
} else {
|
||||
this.myFloat64 = Double.parseDouble("12.34");
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(4)) {
|
||||
this.myString = (String) _taggedFields.remove(4);
|
||||
} else {
|
||||
this.myString = "";
|
||||
}
|
||||
} else {
|
||||
this.myString = "";
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(5)) {
|
||||
this.myBytes = MessageUtil.byteBufferToArray((ByteBuffer) _taggedFields.remove(5));
|
||||
} else {
|
||||
this.myBytes = Bytes.EMPTY;
|
||||
}
|
||||
} else {
|
||||
this.myBytes = Bytes.EMPTY;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(6)) {
|
||||
this.taggedUuid = (UUID) _taggedFields.remove(6);
|
||||
} else {
|
||||
this.taggedUuid = UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367");
|
||||
}
|
||||
} else {
|
||||
this.taggedUuid = UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367");
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (_taggedFields.containsKey(7)) {
|
||||
this.taggedLong = (Long) _taggedFields.remove(7);
|
||||
} else {
|
||||
this.taggedLong = 0xcafcacafcacafcaL;
|
||||
}
|
||||
} else {
|
||||
this.taggedLong = 0xcafcacafcacafcaL;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
this.zeroCopyByteBuffer = struct.getBytes("zero_copy_byte_buffer");
|
||||
} else {
|
||||
this.zeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
this.nullableZeroCopyByteBuffer = struct.getBytes("nullable_zero_copy_byte_buffer");
|
||||
} else {
|
||||
this.nullableZeroCopyByteBuffer = ByteUtils.EMPTY_BUF;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (!_taggedFields.isEmpty()) {
|
||||
this._unknownTaggedFields = new ArrayList<>(_taggedFields.size());
|
||||
for (Entry<Integer, Object> entry : _taggedFields.entrySet()) {
|
||||
this._unknownTaggedFields.add((RawTaggedField) entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Struct toStruct(short _version) {
|
||||
TreeMap<Integer, Object> _taggedFields = null;
|
||||
if (_version >= 1) {
|
||||
_taggedFields = new TreeMap<>();
|
||||
}
|
||||
Struct struct = new Struct(SCHEMAS[_version]);
|
||||
if (_version >= 1) {
|
||||
struct.set("process_id", this.processId);
|
||||
} else {
|
||||
if (processId != MessageUtil.ZERO_UUID) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default processId at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
Integer[] _nestedObjects = new Integer[myTaggedIntArray.size()];
|
||||
int i = 0;
|
||||
for (Integer element : this.myTaggedIntArray) {
|
||||
_nestedObjects[i++] = element;
|
||||
}
|
||||
_taggedFields.put(0, _nestedObjects);
|
||||
}
|
||||
} else {
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myTaggedIntArray at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myNullableString != null) {
|
||||
_taggedFields.put(1, myNullableString);
|
||||
}
|
||||
} else {
|
||||
if (myNullableString != null) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myNullableString at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myInt16 != (short) 123) {
|
||||
_taggedFields.put(2, myInt16);
|
||||
}
|
||||
} else {
|
||||
if (myInt16 != (short) 123) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myInt16 at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
_taggedFields.put(3, myFloat64);
|
||||
}
|
||||
} else {
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myFloat64 at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (!myString.equals("")) {
|
||||
_taggedFields.put(4, myString);
|
||||
}
|
||||
} else {
|
||||
if (!myString.equals("")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myString at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myBytes == null || myBytes.length != 0) {
|
||||
_taggedFields.put(5, (myBytes == null) ? null : ByteBuffer.wrap(myBytes));
|
||||
}
|
||||
} else {
|
||||
if (myBytes == null || myBytes.length != 0) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default myBytes at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
_taggedFields.put(6, taggedUuid);
|
||||
}
|
||||
} else {
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default taggedUuid at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
_taggedFields.put(7, taggedLong);
|
||||
}
|
||||
} else {
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default taggedLong at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
struct.set("zero_copy_byte_buffer", this.zeroCopyByteBuffer);
|
||||
} else {
|
||||
if (zeroCopyByteBuffer.hasRemaining()) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default zeroCopyByteBuffer at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
struct.set("nullable_zero_copy_byte_buffer", this.nullableZeroCopyByteBuffer);
|
||||
} else {
|
||||
if (nullableZeroCopyByteBuffer == null || nullableZeroCopyByteBuffer.remaining() > 0) {
|
||||
throw new UnsupportedVersionException("Attempted to write a non-default nullableZeroCopyByteBuffer at version " + _version);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
struct.set("_tagged_fields", _taggedFields);
|
||||
}
|
||||
return struct;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size(ObjectSerializationCache _cache, short _version) {
|
||||
int _size = 0, _numTaggedFields = 0;
|
||||
if (_version >= 1) {
|
||||
_size += 16;
|
||||
}
|
||||
if (_version >= 1) {
|
||||
{
|
||||
if (!myTaggedIntArray.isEmpty()) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
int _arraySize = 0;
|
||||
_arraySize += ByteUtils.sizeOfUnsignedVarint(myTaggedIntArray.size() + 1);
|
||||
_arraySize += myTaggedIntArray.size() * 4;
|
||||
_cache.setArraySizeInBytes(myTaggedIntArray, _arraySize);
|
||||
_size += _arraySize + ByteUtils.sizeOfUnsignedVarint(_arraySize);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myNullableString == null) {
|
||||
} else {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
byte[] _stringBytes = myNullableString.getBytes(StandardCharsets.UTF_8);
|
||||
if (_stringBytes.length > 0x7fff) {
|
||||
throw new RuntimeException("'myNullableString' field is too long to be serialized");
|
||||
}
|
||||
_cache.cacheSerializedValue(myNullableString, _stringBytes);
|
||||
int _stringPrefixSize = ByteUtils.sizeOfUnsignedVarint(_stringBytes.length + 1);
|
||||
_size += _stringBytes.length + _stringPrefixSize + ByteUtils.sizeOfUnsignedVarint(_stringPrefixSize);
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myInt16 != (short) 123) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
_size += 2;
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myFloat64 != Double.parseDouble("12.34")) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
_size += 8;
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
{
|
||||
if (!myString.equals("")) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
byte[] _stringBytes = myString.getBytes(StandardCharsets.UTF_8);
|
||||
if (_stringBytes.length > 0x7fff) {
|
||||
throw new RuntimeException("'myString' field is too long to be serialized");
|
||||
}
|
||||
_cache.cacheSerializedValue(myString, _stringBytes);
|
||||
int _stringPrefixSize = ByteUtils.sizeOfUnsignedVarint(_stringBytes.length + 1);
|
||||
_size += _stringBytes.length + _stringPrefixSize + ByteUtils.sizeOfUnsignedVarint(_stringPrefixSize);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (myBytes == null) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
} else {
|
||||
if (myBytes.length != 0) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
int _bytesSize = myBytes.length;
|
||||
_bytesSize += ByteUtils.sizeOfUnsignedVarint(myBytes.length + 1);
|
||||
_size += _bytesSize + ByteUtils.sizeOfUnsignedVarint(_bytesSize);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedUuid != UUID.fromString("212d5494-4a8b-4fdf-94b3-88b470beb367")) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
_size += 16;
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (taggedLong != 0xcafcacafcacafcaL) {
|
||||
_numTaggedFields++;
|
||||
_size += 1;
|
||||
_size += 1;
|
||||
_size += 8;
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
{
|
||||
int _bytesSize = zeroCopyByteBuffer.remaining();
|
||||
_bytesSize += ByteUtils.sizeOfUnsignedVarint(zeroCopyByteBuffer.remaining() + 1);
|
||||
_size += _bytesSize;
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
if (nullableZeroCopyByteBuffer == null) {
|
||||
_size += 1;
|
||||
} else {
|
||||
int _bytesSize = nullableZeroCopyByteBuffer.remaining();
|
||||
_bytesSize += ByteUtils.sizeOfUnsignedVarint(nullableZeroCopyByteBuffer.remaining() + 1);
|
||||
_size += _bytesSize;
|
||||
}
|
||||
}
|
||||
if (_unknownTaggedFields != null) {
|
||||
_numTaggedFields += _unknownTaggedFields.size();
|
||||
for (RawTaggedField _field : _unknownTaggedFields) {
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.tag());
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.size());
|
||||
_size += _field.size();
|
||||
}
|
||||
}
|
||||
if (_version >= 1) {
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_numTaggedFields);
|
||||
} else {
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
}
|
||||
return _size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof SimpleExampleMessageData)) return false;
|
||||
SimpleExampleMessageData other = (SimpleExampleMessageData) obj;
|
||||
if (!this.processId.equals(other.processId)) return false;
|
||||
if (this.myTaggedIntArray == null) {
|
||||
if (other.myTaggedIntArray != null) return false;
|
||||
} else {
|
||||
if (!this.myTaggedIntArray.equals(other.myTaggedIntArray)) return false;
|
||||
}
|
||||
if (this.myNullableString == null) {
|
||||
if (other.myNullableString != null) return false;
|
||||
} else {
|
||||
if (!this.myNullableString.equals(other.myNullableString)) return false;
|
||||
}
|
||||
if (myInt16 != other.myInt16) return false;
|
||||
if (myFloat64 != other.myFloat64) return false;
|
||||
if (this.myString == null) {
|
||||
if (other.myString != null) return false;
|
||||
} else {
|
||||
if (!this.myString.equals(other.myString)) return false;
|
||||
}
|
||||
if (!Arrays.equals(this.myBytes, other.myBytes)) return false;
|
||||
if (!this.taggedUuid.equals(other.taggedUuid)) return false;
|
||||
if (taggedLong != other.taggedLong) return false;
|
||||
if (!Objects.equals(this.zeroCopyByteBuffer, other.zeroCopyByteBuffer)) return false;
|
||||
if (!Objects.equals(this.nullableZeroCopyByteBuffer, other.nullableZeroCopyByteBuffer)) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hashCode = 0;
|
||||
hashCode = 31 * hashCode + processId.hashCode();
|
||||
hashCode = 31 * hashCode + (myTaggedIntArray == null ? 0 : myTaggedIntArray.hashCode());
|
||||
hashCode = 31 * hashCode + (myNullableString == null ? 0 : myNullableString.hashCode());
|
||||
hashCode = 31 * hashCode + myInt16;
|
||||
hashCode = 31 * hashCode + Double.hashCode(myFloat64);
|
||||
hashCode = 31 * hashCode + (myString == null ? 0 : myString.hashCode());
|
||||
hashCode = 31 * hashCode + Arrays.hashCode(myBytes);
|
||||
hashCode = 31 * hashCode + taggedUuid.hashCode();
|
||||
hashCode = 31 * hashCode + ((int) (taggedLong >> 32) ^ (int) taggedLong);
|
||||
hashCode = 31 * hashCode + Objects.hashCode(zeroCopyByteBuffer);
|
||||
hashCode = 31 * hashCode + Objects.hashCode(nullableZeroCopyByteBuffer);
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "SimpleExampleMessageData("
|
||||
+ ", myTaggedIntArray=" + MessageUtil.deepToString(myTaggedIntArray.iterator())
|
||||
+ ", myNullableString=" + ((myNullableString == null) ? "null" : "'" + myNullableString.toString() + "'")
|
||||
+ ", myInt16=" + myInt16
|
||||
+ ", myFloat64=" + myFloat64
|
||||
+ ", myString=" + ((myString == null) ? "null" : "'" + myString.toString() + "'")
|
||||
+ ", myBytes=" + Arrays.toString(myBytes)
|
||||
+ ", taggedLong=" + taggedLong
|
||||
+ ", zeroCopyByteBuffer=" + zeroCopyByteBuffer
|
||||
+ ", nullableZeroCopyByteBuffer=" + nullableZeroCopyByteBuffer
|
||||
+ ")";
|
||||
}
|
||||
|
||||
public UUID processId() {
|
||||
return this.processId;
|
||||
}
|
||||
|
||||
public List<Integer> myTaggedIntArray() {
|
||||
return this.myTaggedIntArray;
|
||||
}
|
||||
|
||||
public String myNullableString() {
|
||||
return this.myNullableString;
|
||||
}
|
||||
|
||||
public short myInt16() {
|
||||
return this.myInt16;
|
||||
}
|
||||
|
||||
public double myFloat64() {
|
||||
return this.myFloat64;
|
||||
}
|
||||
|
||||
public String myString() {
|
||||
return this.myString;
|
||||
}
|
||||
|
||||
public byte[] myBytes() {
|
||||
return this.myBytes;
|
||||
}
|
||||
|
||||
public UUID taggedUuid() {
|
||||
return this.taggedUuid;
|
||||
}
|
||||
|
||||
public long taggedLong() {
|
||||
return this.taggedLong;
|
||||
}
|
||||
|
||||
public ByteBuffer zeroCopyByteBuffer() {
|
||||
return this.zeroCopyByteBuffer;
|
||||
}
|
||||
|
||||
public ByteBuffer nullableZeroCopyByteBuffer() {
|
||||
return this.nullableZeroCopyByteBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<RawTaggedField> unknownTaggedFields() {
|
||||
if (_unknownTaggedFields == null) {
|
||||
_unknownTaggedFields = new ArrayList<>(0);
|
||||
}
|
||||
return _unknownTaggedFields;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setProcessId(UUID v) {
|
||||
this.processId = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyTaggedIntArray(List<Integer> v) {
|
||||
this.myTaggedIntArray = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyNullableString(String v) {
|
||||
this.myNullableString = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyInt16(short v) {
|
||||
this.myInt16 = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyFloat64(double v) {
|
||||
this.myFloat64 = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyString(String v) {
|
||||
this.myString = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setMyBytes(byte[] v) {
|
||||
this.myBytes = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setTaggedUuid(UUID v) {
|
||||
this.taggedUuid = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setTaggedLong(long v) {
|
||||
this.taggedLong = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setZeroCopyByteBuffer(ByteBuffer v) {
|
||||
this.zeroCopyByteBuffer = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleExampleMessageData setNullableZeroCopyByteBuffer(ByteBuffer v) {
|
||||
this.nullableZeroCopyByteBuffer = v;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -1,281 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// THIS CODE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
|
||||
|
||||
package org.apache.kafka.common.message;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.TreeMap;
|
||||
import org.apache.kafka.common.errors.UnsupportedVersionException;
|
||||
import org.apache.kafka.common.protocol.ApiMessage;
|
||||
import org.apache.kafka.common.protocol.ObjectSerializationCache;
|
||||
import org.apache.kafka.common.protocol.Readable;
|
||||
import org.apache.kafka.common.protocol.Writable;
|
||||
import org.apache.kafka.common.protocol.types.Field;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedField;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedFieldWriter;
|
||||
import org.apache.kafka.common.protocol.types.Schema;
|
||||
import org.apache.kafka.common.protocol.types.Struct;
|
||||
import org.apache.kafka.common.protocol.types.Type;
|
||||
import org.apache.kafka.common.utils.ByteUtils;
|
||||
|
||||
|
||||
public class AddOffsetsToTxnRequestData implements ApiMessage {
|
||||
private String transactionalId;
|
||||
private long producerId;
|
||||
private short producerEpoch;
|
||||
private String groupId;
|
||||
private List<RawTaggedField> _unknownTaggedFields;
|
||||
|
||||
public static final Schema SCHEMA_0 =
|
||||
new Schema(
|
||||
new Field("transactional_id", Type.STRING, "The transactional id corresponding to the transaction."),
|
||||
new Field("producer_id", Type.INT64, "Current producer id in use by the transactional id."),
|
||||
new Field("producer_epoch", Type.INT16, "Current epoch associated with the producer id."),
|
||||
new Field("group_id", Type.STRING, "The unique group identifier.")
|
||||
);
|
||||
|
||||
public static final Schema SCHEMA_1 = SCHEMA_0;
|
||||
|
||||
public static final Schema[] SCHEMAS = new Schema[] {
|
||||
SCHEMA_0,
|
||||
SCHEMA_1
|
||||
};
|
||||
|
||||
public AddOffsetsToTxnRequestData(Readable _readable, short _version) {
|
||||
read(_readable, _version);
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData(Struct struct, short _version) {
|
||||
fromStruct(struct, _version);
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData() {
|
||||
this.transactionalId = "";
|
||||
this.producerId = 0L;
|
||||
this.producerEpoch = (short) 0;
|
||||
this.groupId = "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public short apiKey() {
|
||||
return 25;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short lowestSupportedVersion() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short highestSupportedVersion() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void read(Readable _readable, short _version) {
|
||||
{
|
||||
int length;
|
||||
length = _readable.readShort();
|
||||
if (length < 0) {
|
||||
throw new RuntimeException("non-nullable field transactionalId was serialized as null");
|
||||
} else if (length > 0x7fff) {
|
||||
throw new RuntimeException("string field transactionalId had invalid length " + length);
|
||||
} else {
|
||||
this.transactionalId = _readable.readString(length);
|
||||
}
|
||||
}
|
||||
this.producerId = _readable.readLong();
|
||||
this.producerEpoch = _readable.readShort();
|
||||
{
|
||||
int length;
|
||||
length = _readable.readShort();
|
||||
if (length < 0) {
|
||||
throw new RuntimeException("non-nullable field groupId was serialized as null");
|
||||
} else if (length > 0x7fff) {
|
||||
throw new RuntimeException("string field groupId had invalid length " + length);
|
||||
} else {
|
||||
this.groupId = _readable.readString(length);
|
||||
}
|
||||
}
|
||||
this._unknownTaggedFields = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Writable _writable, ObjectSerializationCache _cache, short _version) {
|
||||
int _numTaggedFields = 0;
|
||||
{
|
||||
byte[] _stringBytes = _cache.getSerializedValue(transactionalId);
|
||||
_writable.writeShort((short) _stringBytes.length);
|
||||
_writable.writeByteArray(_stringBytes);
|
||||
}
|
||||
_writable.writeLong(producerId);
|
||||
_writable.writeShort(producerEpoch);
|
||||
{
|
||||
byte[] _stringBytes = _cache.getSerializedValue(groupId);
|
||||
_writable.writeShort((short) _stringBytes.length);
|
||||
_writable.writeByteArray(_stringBytes);
|
||||
}
|
||||
RawTaggedFieldWriter _rawWriter = RawTaggedFieldWriter.forFields(_unknownTaggedFields);
|
||||
_numTaggedFields += _rawWriter.numFields();
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public void fromStruct(Struct struct, short _version) {
|
||||
this._unknownTaggedFields = null;
|
||||
this.transactionalId = struct.getString("transactional_id");
|
||||
this.producerId = struct.getLong("producer_id");
|
||||
this.producerEpoch = struct.getShort("producer_epoch");
|
||||
this.groupId = struct.getString("group_id");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Struct toStruct(short _version) {
|
||||
TreeMap<Integer, Object> _taggedFields = null;
|
||||
Struct struct = new Struct(SCHEMAS[_version]);
|
||||
struct.set("transactional_id", this.transactionalId);
|
||||
struct.set("producer_id", this.producerId);
|
||||
struct.set("producer_epoch", this.producerEpoch);
|
||||
struct.set("group_id", this.groupId);
|
||||
return struct;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size(ObjectSerializationCache _cache, short _version) {
|
||||
int _size = 0, _numTaggedFields = 0;
|
||||
{
|
||||
byte[] _stringBytes = transactionalId.getBytes(StandardCharsets.UTF_8);
|
||||
if (_stringBytes.length > 0x7fff) {
|
||||
throw new RuntimeException("'transactionalId' field is too long to be serialized");
|
||||
}
|
||||
_cache.cacheSerializedValue(transactionalId, _stringBytes);
|
||||
_size += _stringBytes.length + 2;
|
||||
}
|
||||
_size += 8;
|
||||
_size += 2;
|
||||
{
|
||||
byte[] _stringBytes = groupId.getBytes(StandardCharsets.UTF_8);
|
||||
if (_stringBytes.length > 0x7fff) {
|
||||
throw new RuntimeException("'groupId' field is too long to be serialized");
|
||||
}
|
||||
_cache.cacheSerializedValue(groupId, _stringBytes);
|
||||
_size += _stringBytes.length + 2;
|
||||
}
|
||||
if (_unknownTaggedFields != null) {
|
||||
_numTaggedFields += _unknownTaggedFields.size();
|
||||
for (RawTaggedField _field : _unknownTaggedFields) {
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.tag());
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.size());
|
||||
_size += _field.size();
|
||||
}
|
||||
}
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
return _size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof AddOffsetsToTxnRequestData)) return false;
|
||||
AddOffsetsToTxnRequestData other = (AddOffsetsToTxnRequestData) obj;
|
||||
if (this.transactionalId == null) {
|
||||
if (other.transactionalId != null) return false;
|
||||
} else {
|
||||
if (!this.transactionalId.equals(other.transactionalId)) return false;
|
||||
}
|
||||
if (producerId != other.producerId) return false;
|
||||
if (producerEpoch != other.producerEpoch) return false;
|
||||
if (this.groupId == null) {
|
||||
if (other.groupId != null) return false;
|
||||
} else {
|
||||
if (!this.groupId.equals(other.groupId)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hashCode = 0;
|
||||
hashCode = 31 * hashCode + (transactionalId == null ? 0 : transactionalId.hashCode());
|
||||
hashCode = 31 * hashCode + ((int) (producerId >> 32) ^ (int) producerId);
|
||||
hashCode = 31 * hashCode + producerEpoch;
|
||||
hashCode = 31 * hashCode + (groupId == null ? 0 : groupId.hashCode());
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "AddOffsetsToTxnRequestData("
|
||||
+ "transactionalId=" + ((transactionalId == null) ? "null" : "'" + transactionalId.toString() + "'")
|
||||
+ ", producerId=" + producerId
|
||||
+ ", producerEpoch=" + producerEpoch
|
||||
+ ", groupId=" + ((groupId == null) ? "null" : "'" + groupId.toString() + "'")
|
||||
+ ")";
|
||||
}
|
||||
|
||||
public String transactionalId() {
|
||||
return this.transactionalId;
|
||||
}
|
||||
|
||||
public long producerId() {
|
||||
return this.producerId;
|
||||
}
|
||||
|
||||
public short producerEpoch() {
|
||||
return this.producerEpoch;
|
||||
}
|
||||
|
||||
public String groupId() {
|
||||
return this.groupId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<RawTaggedField> unknownTaggedFields() {
|
||||
if (_unknownTaggedFields == null) {
|
||||
_unknownTaggedFields = new ArrayList<>(0);
|
||||
}
|
||||
return _unknownTaggedFields;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData setTransactionalId(String v) {
|
||||
this.transactionalId = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData setProducerId(long v) {
|
||||
this.producerId = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData setProducerEpoch(short v) {
|
||||
this.producerEpoch = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnRequestData setGroupId(String v) {
|
||||
this.groupId = v;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -1,190 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// THIS CODE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
|
||||
|
||||
package org.apache.kafka.common.message;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.TreeMap;
|
||||
import org.apache.kafka.common.errors.UnsupportedVersionException;
|
||||
import org.apache.kafka.common.protocol.ApiMessage;
|
||||
import org.apache.kafka.common.protocol.ObjectSerializationCache;
|
||||
import org.apache.kafka.common.protocol.Readable;
|
||||
import org.apache.kafka.common.protocol.Writable;
|
||||
import org.apache.kafka.common.protocol.types.Field;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedField;
|
||||
import org.apache.kafka.common.protocol.types.RawTaggedFieldWriter;
|
||||
import org.apache.kafka.common.protocol.types.Schema;
|
||||
import org.apache.kafka.common.protocol.types.Struct;
|
||||
import org.apache.kafka.common.protocol.types.Type;
|
||||
import org.apache.kafka.common.utils.ByteUtils;
|
||||
|
||||
|
||||
public class AddOffsetsToTxnResponseData implements ApiMessage {
|
||||
private int throttleTimeMs;
|
||||
private short errorCode;
|
||||
private List<RawTaggedField> _unknownTaggedFields;
|
||||
|
||||
public static final Schema SCHEMA_0 =
|
||||
new Schema(
|
||||
new Field("throttle_time_ms", Type.INT32, "Duration in milliseconds for which the request was throttled due to a quota violation, or zero if the request did not violate any quota."),
|
||||
new Field("error_code", Type.INT16, "The response error code, or 0 if there was no error.")
|
||||
);
|
||||
|
||||
public static final Schema SCHEMA_1 = SCHEMA_0;
|
||||
|
||||
public static final Schema[] SCHEMAS = new Schema[] {
|
||||
SCHEMA_0,
|
||||
SCHEMA_1
|
||||
};
|
||||
|
||||
public AddOffsetsToTxnResponseData(Readable _readable, short _version) {
|
||||
read(_readable, _version);
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnResponseData(Struct struct, short _version) {
|
||||
fromStruct(struct, _version);
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnResponseData() {
|
||||
this.throttleTimeMs = 0;
|
||||
this.errorCode = (short) 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short apiKey() {
|
||||
return 25;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short lowestSupportedVersion() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short highestSupportedVersion() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void read(Readable _readable, short _version) {
|
||||
this.throttleTimeMs = _readable.readInt();
|
||||
this.errorCode = _readable.readShort();
|
||||
this._unknownTaggedFields = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Writable _writable, ObjectSerializationCache _cache, short _version) {
|
||||
int _numTaggedFields = 0;
|
||||
_writable.writeInt(throttleTimeMs);
|
||||
_writable.writeShort(errorCode);
|
||||
RawTaggedFieldWriter _rawWriter = RawTaggedFieldWriter.forFields(_unknownTaggedFields);
|
||||
_numTaggedFields += _rawWriter.numFields();
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public void fromStruct(Struct struct, short _version) {
|
||||
this._unknownTaggedFields = null;
|
||||
this.throttleTimeMs = struct.getInt("throttle_time_ms");
|
||||
this.errorCode = struct.getShort("error_code");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Struct toStruct(short _version) {
|
||||
TreeMap<Integer, Object> _taggedFields = null;
|
||||
Struct struct = new Struct(SCHEMAS[_version]);
|
||||
struct.set("throttle_time_ms", this.throttleTimeMs);
|
||||
struct.set("error_code", this.errorCode);
|
||||
return struct;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size(ObjectSerializationCache _cache, short _version) {
|
||||
int _size = 0, _numTaggedFields = 0;
|
||||
_size += 4;
|
||||
_size += 2;
|
||||
if (_unknownTaggedFields != null) {
|
||||
_numTaggedFields += _unknownTaggedFields.size();
|
||||
for (RawTaggedField _field : _unknownTaggedFields) {
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.tag());
|
||||
_size += ByteUtils.sizeOfUnsignedVarint(_field.size());
|
||||
_size += _field.size();
|
||||
}
|
||||
}
|
||||
if (_numTaggedFields > 0) {
|
||||
throw new UnsupportedVersionException("Tagged fields were set, but version " + _version + " of this message does not support them.");
|
||||
}
|
||||
return _size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof AddOffsetsToTxnResponseData)) return false;
|
||||
AddOffsetsToTxnResponseData other = (AddOffsetsToTxnResponseData) obj;
|
||||
if (throttleTimeMs != other.throttleTimeMs) return false;
|
||||
if (errorCode != other.errorCode) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hashCode = 0;
|
||||
hashCode = 31 * hashCode + throttleTimeMs;
|
||||
hashCode = 31 * hashCode + errorCode;
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "AddOffsetsToTxnResponseData("
|
||||
+ "throttleTimeMs=" + throttleTimeMs
|
||||
+ ", errorCode=" + errorCode
|
||||
+ ")";
|
||||
}
|
||||
|
||||
public int throttleTimeMs() {
|
||||
return this.throttleTimeMs;
|
||||
}
|
||||
|
||||
public short errorCode() {
|
||||
return this.errorCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<RawTaggedField> unknownTaggedFields() {
|
||||
if (_unknownTaggedFields == null) {
|
||||
_unknownTaggedFields = new ArrayList<>(0);
|
||||
}
|
||||
return _unknownTaggedFields;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnResponseData setThrottleTimeMs(int v) {
|
||||
this.throttleTimeMs = v;
|
||||
return this;
|
||||
}
|
||||
|
||||
public AddOffsetsToTxnResponseData setErrorCode(short v) {
|
||||
this.errorCode = v;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user