diff --git a/docs/ops.html b/docs/ops.html
index 2d050ec76da..fcef480f2c9 100644
--- a/docs/ops.html
+++ b/docs/ops.html
@@ -4315,7 +4315,7 @@ $ bin/kafka-topics.sh --create --topic tieredTopic --bootstrap-server localhost:
Lastly, we can try to consume some data from the beginning and print the offset, to make sure it successfully fetches offset 0 from the remote storage.
-$ bin/kafka-console-consumer.sh --topic tieredTopic --from-beginning --max-messages 1 --bootstrap-server localhost:9092 --property print.offset=true
+$ bin/kafka-console-consumer.sh --topic tieredTopic --from-beginning --max-messages 1 --bootstrap-server localhost:9092 --formatter-property print.offset=true
In KRaft mode, you can disable tiered storage at the topic level, either keeping the remote logs as read-only logs or deleting all remote logs completely.
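For reference, a minimal Java sketch of the same verification, assuming the single-partition tieredTopic created in the steps above: it assigns partition 0, seeks to the beginning, and prints the first offset fetched, which should be 0 if remote storage is serving the read.

// Hedged sketch: assumes tieredTopic has one partition and a broker on
// localhost:9092, matching the example setup above.
import java.time.Duration;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

public class TieredOffsetCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("tieredTopic", 0);
            consumer.assign(List.of(tp));
            consumer.seekToBeginning(List.of(tp));
            for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofSeconds(10))) {
                System.out.println("offset=" + record.offset() + " value=" + record.value());
                break; // one record is enough to verify offset 0 is readable
            }
        }
    }
}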
diff --git a/docs/streams/developer-guide/datatypes.html b/docs/streams/developer-guide/datatypes.html
index 2bc2d7d5d0e..6e93de6e1e1 100644
--- a/docs/streams/developer-guide/datatypes.html
+++ b/docs/streams/developer-guide/datatypes.html
@@ -217,15 +217,15 @@ TimeWindowedDeserializer<String> deserializer = new TimeWindowedDeserializ
When using command-line tools (like bin/kafka-console-consumer.sh
), you can configure windowed deserializers by passing the inner deserializer class and window size via configuration properties. The property names use a prefix pattern:
# Time windowed deserializer configuration
---property print.key=true \
---property key.deserializer=org.apache.kafka.streams.kstream.TimeWindowedDeserializer \
---property key.deserializer.windowed.inner.deserializer.class=org.apache.kafka.common.serialization.StringDeserializer \
---property key.deserializer.window.size.ms=500
+--formatter-property print.key=true \
+--formatter-property key.deserializer=org.apache.kafka.streams.kstream.TimeWindowedDeserializer \
+--formatter-property key.deserializer.windowed.inner.deserializer.class=org.apache.kafka.common.serialization.StringDeserializer \
+--formatter-property key.deserializer.window.size.ms=500
# Session windowed deserializer configuration
---property print.key=true \
---property key.deserializer=org.apache.kafka.streams.kstream.SessionWindowedDeserializer \
---property key.deserializer.windowed.inner.deserializer.class=org.apache.kafka.common.serialization.StringDeserializer
+--formatter-property print.key=true \
+--formatter-property key.deserializer=org.apache.kafka.streams.kstream.SessionWindowedDeserializer \
+--formatter-property key.deserializer.windowed.inner.deserializer.class=org.apache.kafka.common.serialization.StringDeserializer
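For reference, a minimal Java sketch of the equivalent programmatic setup, assuming the unprefixed config keys shown above (windowed.inner.deserializer.class, window.size.ms) and the example 500 ms window size.

// Hedged sketch: the config keys mirror the --formatter-property flags above,
// minus the key.deserializer. prefix added by the console tool.
import java.util.Map;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.streams.kstream.TimeWindowedDeserializer;

public class WindowedDeserializerConfig {
    public static void main(String[] args) {
        // Option 1: configure via properties, mirroring the CLI flags.
        TimeWindowedDeserializer<String> configured = new TimeWindowedDeserializer<>();
        configured.configure(
                Map.of("windowed.inner.deserializer.class", StringDeserializer.class.getName(),
                       "window.size.ms", "500"),
                true); // true = configuring the key deserializer

        // Option 2: pass the parameters directly to the constructor, as the
        // deprecation note below recommends.
        TimeWindowedDeserializer<String> direct =
                new TimeWindowedDeserializer<>(new StringDeserializer(), 500L);
    }
}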
The following StreamsConfig
parameters are deprecated in favor of passing parameters directly to serializer/deserializer constructors:
$ bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 \
--topic streams-wordcount-output \
--from-beginning \
- --property print.key=true \
- --property print.value=true \
- --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
- --property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
+ --formatter-property print.key=true \
+ --formatter-property print.value=true \
+ --formatter-property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
+ --formatter-property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
$ bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 \
--topic streams-wordcount-output \
--from-beginning \
- --property print.key=true \
- --property print.value=true \
- --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
- --property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
+ --formatter-property print.key=true \
+ --formatter-property print.value=true \
+ --formatter-property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
+ --formatter-property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
all 1
streams 1
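For reference, a minimal Java sketch equivalent to the console command above, under the same assumptions (String keys and Long values on streams-wordcount-output); the group id is hypothetical.

// Hedged sketch: auto.offset.reset=earliest mirrors --from-beginning for a
// consumer group reading the topic for the first time.
import java.time.Duration;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class WordCountReader {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "wordcount-reader"); // hypothetical group id
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", LongDeserializer.class.getName());

        try (KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(List.of("streams-wordcount-output"));
            while (true) {
                for (ConsumerRecord<String, Long> record : consumer.poll(Duration.ofSeconds(1))) {
                    System.out.println(record.key() + "\t" + record.value());
                }
            }
        }
    }
}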
@@ -225,10 +225,10 @@ In your other terminal in which the console consumer is running, you will observ
$ bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 \
--topic streams-wordcount-output \
--from-beginning \
- --property print.key=true \
- --property print.value=true \
- --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
- --property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
+ --formatter-property print.key=true \
+ --formatter-property print.value=true \
+ --formatter-property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
+ --formatter-property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
all 1
streams 1
@@ -255,10 +255,10 @@ The streams-wordcount-output topic will subsequently show the correspondi
$ bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 \
--topic streams-wordcount-output \
--from-beginning \
- --property print.key=true \
- --property print.value=true \
- --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
- --property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
+ --formatter-property print.key=true \
+ --formatter-property print.value=true \
+ --formatter-property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
+ --formatter-property value.deserializer=org.apache.kafka.common.serialization.LongDeserializer
all 1
streams 1