aggregator.log
  .   ____          _            __ _ _
 /\\ / ___'_ __ _ _(_)_ __  __ _ \ \ \ \
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
 \\/  ___)| |_)| | | | | || (_| |  ) ) ) )
  '  |____| .__|_| |_|_| |_\__, | / / / /
 =========|_|==============|___/=/_/_/_/
 :: Spring Boot ::        (v2.2.4.RELEASE)
2020-01-28 22:24:23.612 INFO 61679 --- [ main] c.h.y.aggregator.AggregatorApplication : Starting AggregatorApplication v0.0.1-SNAPSHOT on MacBook-Pro.local with PID 61679 (/Users/matthewhersee/Documents/Development/OmniiLogic/YellowDog/aggregator/target/aggregator-0.0.1-SNAPSHOT.jar started by matthewhersee in /Users/matthewhersee/Documents/Development/OmniiLogic/YellowDog/aggregator)
2020-01-28 22:24:23.615 INFO 61679 --- [ main] c.h.y.aggregator.AggregatorApplication : No active profile set, falling back to default profiles: default
2020-01-28 22:24:24.767 INFO 61679 --- [ main] o.a.k.clients.consumer.ConsumerConfig : ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = latest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.dns.lookup = default
client.id =
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = consumer
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = true
isolation.level = read_uncommitted
key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 500
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.springframework.kafka.support.serializer.JsonDeserializer
2020-01-28 22:24:24.920 INFO 61679 --- [ main] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.3.1
2020-01-28 22:24:24.921 INFO 61679 --- [ main] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: 18a913733fb71c01
2020-01-28 22:24:24.922 INFO 61679 --- [ main] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1580250264918
2020-01-28 22:24:24.925 INFO 61679 --- [ main] o.a.k.clients.consumer.KafkaConsumer : [Consumer clientId=consumer-1, groupId=consumer] Subscribed to topic(s): trade.record
2020-01-28 22:24:24.926 INFO 61679 --- [ main] o.s.s.c.ThreadPoolTaskScheduler : Initializing ExecutorService
2020-01-28 22:24:24.946 INFO 61679 --- [ main] c.h.y.aggregator.AggregatorApplication : Started AggregatorApplication in 1.986 seconds (JVM running for 2.477)
2020-01-28 22:24:25.193 INFO 61679 --- [ntainer#0-0-C-1] org.apache.kafka.clients.Metadata : [Consumer clientId=consumer-1, groupId=consumer] Cluster ID: gxcsCfLNTA2aqXc0HQ-l5w
2020-01-28 22:24:26.279 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer-1, groupId=consumer] Discovered group coordinator localhost:9092 (id: 2147483646 rack: null)
2020-01-28 22:24:26.283 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer-1, groupId=consumer] Revoking previously assigned partitions []
2020-01-28 22:24:26.284 INFO 61679 --- [ntainer#0-0-C-1] o.s.k.l.KafkaMessageListenerContainer : consumer: partitions revoked: []
2020-01-28 22:24:26.284 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer-1, groupId=consumer] (Re-)joining group
2020-01-28 22:24:26.350 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer-1, groupId=consumer] (Re-)joining group
2020-01-28 22:24:26.616 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer-1, groupId=consumer] Successfully joined group with generation 1
2020-01-28 22:24:26.621 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer-1, groupId=consumer] Setting newly assigned partitions: trade.record-0
2020-01-28 22:24:26.658 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer-1, groupId=consumer] Found no committed offset for partition trade.record-0
2020-01-28 22:24:26.693 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.SubscriptionState : [Consumer clientId=consumer-1, groupId=consumer] Resetting offset for partition trade.record-0 to offset 0.
2020-01-28 22:24:26.742 INFO 61679 --- [ntainer#0-0-C-1] o.s.k.l.KafkaMessageListenerContainer : consumer: partitions assigned: [trade.record-0]
2020-01-28 22:27:37.589 INFO 61679 --- [ntainer#0-0-C-1] o.a.k.clients.producer.ProducerConfig : ProducerConfig values:
acks = 1
batch.size = 16384
bootstrap.servers = [localhost:9092]
buffer.memory = 33554432
client.dns.lookup = default
client.id =
compression.type = none
connections.max.idle.ms = 540000
delivery.timeout.ms = 120000
enable.idempotence = false
interceptor.classes = []
key.serializer = class org.apache.kafka.common.serialization.StringSerializer
linger.ms = 0
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
transaction.timeout.ms = 60000
transactional.id = null
value.serializer = class org.springframework.kafka.support.serializer.JsonSerializer
2020-01-28 22:27:37.619 INFO 61679 --- [ntainer#0-0-C-1] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.3.1
2020-01-28 22:27:37.620 INFO 61679 --- [ntainer#0-0-C-1] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: 18a913733fb71c01
2020-01-28 22:27:37.620 INFO 61679 --- [ntainer#0-0-C-1] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1580250457619
2020-01-28 22:27:37.635 INFO 61679 --- [ad | producer-1] org.apache.kafka.clients.Metadata : [Producer clientId=producer-1] Cluster ID: gxcsCfLNTA2aqXc0HQ-l5w