    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
14:17:10.701 [vert.x-worker-thread-6] INFO KafkaProducer [25288736eqId] [Producer clientId=producer-30] Instantiated an idempotent producer.
14:17:10.703 [vert.x-worker-thread-6] INFO KafkaProducer [25288738eqId] [Producer clientId=producer-30] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
14:17:10.703 [vert.x-worker-thread-6] INFO KafkaProducer [25288738eqId] [Producer clientId=producer-30] Overriding the default acks to all since idempotence is enabled.
14:17:10.756 [vert.x-worker-thread-6] INFO ppInfoParser$AppInfo [25288791eqId] Kafka version: 2.5.0
14:17:10.756 [vert.x-worker-thread-6] INFO ppInfoParser$AppInfo [25288791eqId] Kafka commitId: 66563e712b0b9f84
14:17:10.756 [vert.x-worker-thread-6] INFO ppInfoParser$AppInfo [25288791eqId] Kafka startTimeMs: 1634307430754
14:17:10.757 [vert.x-worker-thread-6] DEBUG KafkaConsumerWrapper [25288792eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@2f7988d8
14:17:10.758 [kafka-producer-network-thread | producer-30] INFO Metadata [25288793eqId] [Producer clientId=producer-30] Cluster ID: ndvmVewOTKSTf8qW5if9eg
14:17:10.759 [kafka-producer-network-thread | producer-30] INFO TransactionManager [25288794eqId] [Producer clientId=producer-30] ProducerId set to 4069 with epoch 0
14:17:10.764 [vert.x-worker-thread-8] DEBUG rdChunksKafkaHandler [25288799eqId] RecordCollection processing has been completed with response sent... correlationId dc2d17f9-2d5b-4a7b-989e-48d3fd34433f, chunkNumber 10-10
14:17:10.765 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [25288800eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committing offset: 10
14:17:10.765 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [25288800eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -10
14:17:10.765 [vert.x-worker-thread-8] INFO KafkaProducer [25288800eqId] [Producer clientId=producer-30] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
14:17:11.428 [vert.x-worker-thread-17] DEBUG KafkaConsumerWrapper [25289463eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) a Record has been received. key: 12 currentLoad: 1 globalLoad: 1
14:17:11.428 [vert.x-worker-thread-17] DEBUG taImportKafkaHandler [25289463eqId] Data import event payload has been received with event type: DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING and correlationId: a08d8df5-df7d-4ffc-994f-48f9365a6814
14:17:11.429 [vert.x-worker-thread-17] DEBUG ProfileSnapshotCache [25289464eqId] Trying to load jobProfileSnapshot by id '80787eb7-807b-4062-8f7a-5f9de915bb2a' for cache, okapi url: http://okapi:9130, tenantId: diku
14:17:11.430 [vert.x-worker-thread-17] DEBUG KafkaConsumerWrapper [25289465eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@5d830be2
14:17:11.540 [vert.x-worker-thread-19] INFO ProfileSnapshotCache [25289575eqId] JobProfileSnapshot was loaded by id '80787eb7-807b-4062-8f7a-5f9de915bb2a'
14:17:11.542 [vert.x-worker-thread-19] DEBUG ametersSnapshotCache [25289577eqId] Trying to load MappingParametersSnapshot by jobExecutionId '0b1ecf51-420e-4e75-af41-f4003b115f6a' for cache, okapi url: http://okapi:9130, tenantId: diku
14:17:11.563 [vert.x-worker-thread-9] INFO ametersSnapshotCache [25289598eqId] MappingParametersSnapshot was loaded by jobExecutionId '0b1ecf51-420e-4e75-af41-f4003b115f6a'
14:17:11.851 [vert.x-worker-thread-11] INFO KafkaConsumerWrapper [25289886eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committed offset: 10
14:17:12.460 [vert.x-worker-thread-7] INFO AbstractConfig [25290495eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-31
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
14:17:12.461 [vert.x-worker-thread-7] INFO KafkaProducer [25290496eqId] [Producer clientId=producer-31] Instantiated an idempotent producer.
14:17:12.552 [vert.x-worker-thread-7] INFO KafkaProducer [25290587eqId] [Producer clientId=producer-31] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
14:17:12.552 [vert.x-worker-thread-7] INFO KafkaProducer [25290587eqId] [Producer clientId=producer-31] Overriding the default acks to all since idempotence is enabled.
14:17:12.553 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290588eqId] Kafka version: 2.5.0
14:17:12.553 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290588eqId] Kafka commitId: 66563e712b0b9f84
14:17:12.553 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290588eqId] Kafka startTimeMs: 1634307432552
14:17:12.651 [vert.x-worker-thread-7] INFO AbstractConfig [25290686eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-32
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
14:17:12.651 [vert.x-worker-thread-7] INFO KafkaProducer [25290686eqId] [Producer clientId=producer-32] Instantiated an idempotent producer.
14:17:12.654 [vert.x-worker-thread-7] INFO KafkaProducer [25290689eqId] [Producer clientId=producer-32] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
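[Note on the SubscriptionDefinition lines above: subscriptionPattern is a plain Java regex, with \w{1,} matching any tenant id (diku here) between the metadata-spitfire.Default environment/namespace prefix and the event type. For anyone reproducing the subscription outside the org.folio.kafka wrapper, the stock Kafka consumer accepts the same pattern; a minimal sketch, with the group id and deserializers invented for illustration:

    import java.time.Duration;
    import java.util.Properties;
    import java.util.regex.Pattern;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class PatternSubscribeSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "di-raw-records-demo"); // made-up group id
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                // Exactly the regex logged in SubscriptionDefinition: env "metadata-spitfire",
                // namespace "Default", any tenant, then the event type.
                consumer.subscribe(Pattern.compile(
                        "metadata-spitfire\\.Default\\.\\w{1,}\\.DI_RAW_RECORDS_CHUNK_PARSED"));
                consumer.poll(Duration.ofSeconds(1)); // triggers pattern-based partition assignment
            }
        }
    }
]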
14:17:12.654 [vert.x-worker-thread-7] INFO KafkaProducer [25290689eqId] [Producer clientId=producer-32] Overriding the default acks to all since idempotence is enabled.
14:17:12.654 [kafka-producer-network-thread | producer-31] INFO Metadata [25290689eqId] [Producer clientId=producer-31] Cluster ID: ndvmVewOTKSTf8qW5if9eg
14:17:12.655 [kafka-producer-network-thread | producer-31] INFO TransactionManager [25290690eqId] [Producer clientId=producer-31] ProducerId set to 4073 with epoch 0
14:17:12.655 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290690eqId] Kafka version: 2.5.0
14:17:12.656 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290691eqId] Kafka commitId: 66563e712b0b9f84
14:17:12.656 [vert.x-worker-thread-7] INFO ppInfoParser$AppInfo [25290691eqId] Kafka startTimeMs: 1634307432655
14:17:12.753 [kafka-producer-network-thread | producer-32] INFO Metadata [25290788eqId] [Producer clientId=producer-32] Cluster ID: ndvmVewOTKSTf8qW5if9eg
14:17:12.753 [kafka-producer-network-thread | producer-32] INFO TransactionManager [25290788eqId] [Producer clientId=producer-32] ProducerId set to 4074 with epoch 0
14:17:12.762 [vert.x-worker-thread-7] INFO EventHandlingUtil [25290797eqId] Event with type DI_SRS_MARC_HOLDINGS_HOLDING_HRID_SET and correlationId a08d8df5-df7d-4ffc-994f-48f9365a6814 was sent to kafka
14:17:12.762 [vert.x-worker-thread-7] INFO KafkaProducer [25290797eqId] [Producer clientId=producer-31] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
14:17:12.852 [vert.x-worker-thread-14] INFO KafkaEventPublisher [25290887eqId] Event with type: DI_COMPLETED and correlationId: a08d8df5-df7d-4ffc-994f-48f9365a6814 was sent to the topic metadata-spitfire.Default.diku.DI_COMPLETED
14:17:12.852 [vert.x-worker-thread-14] DEBUG KafkaConsumerWrapper [25290887eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) Committing offset: 2
14:17:12.853 [vert.x-worker-thread-14] DEBUG KafkaConsumerWrapper [25290888eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -2
14:17:12.853 [vert.x-worker-thread-14] INFO KafkaProducer [25290888eqId] [Producer clientId=producer-32] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
14:17:12.892 [vert.x-eventloop-thread-1] INFO RestRouting [25290927eqId] invoking putSourceStorageSnapshotsByJobExecutionId
14:17:12.953 [vert.x-eventloop-thread-1] INFO LogUtil [25290988eqId] 192.168.183.117:58836 PUT /source-storage/snapshots/0b1ecf51-420e-4e75-af41-f4003b115f6a HTTP_1_1 200 342 61 tid=diku OK
14:17:13.451 [vert.x-worker-thread-9] INFO KafkaConsumerWrapper [25291486eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) Committed offset: 2
14:18:23.255 [vert.x-eventloop-thread-1] INFO RestRouting [25361290eqId] invoking getSourceStorageRecordsById
14:18:23.275 [vert.x-eventloop-thread-1] INFO LogUtil [25361310eqId] 192.168.183.117:59286 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 19 tid=diku OK
15:16:33.854 [vert.x-worker-thread-10] INFO KafkaInternalCache [28851889eqId] Clearing cache from outdated events...
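[Note on the recurring "Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms." lines: that number is not a misconfiguration, it is Long.MAX_VALUE, which is simply what KafkaProducer.close() logs when called with no explicit timeout. A sketch of the two variants; the shutDown helper is hypothetical:

    import java.time.Duration;
    import org.apache.kafka.clients.producer.KafkaProducer;

    public class ProducerCloseSketch {
        // `producer` stands for any instance like producer-30..38 in this log.
        static void shutDown(KafkaProducer<String, String> producer, boolean bounded) {
            if (bounded) {
                producer.close(Duration.ofSeconds(30)); // logs "timeoutMillis = 30000 ms"
            } else {
                producer.close(); // logs "timeoutMillis = 9223372036854775807 ms" (Long.MAX_VALUE)
            }
        }
    }
]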
17:16:33.854 [vert.x-worker-thread-18] INFO KafkaInternalCache [36051889eqId] Clearing cache from outdated events...
15:48:03.526 [vert.x-eventloop-thread-1] INFO RestRouting [117141561eqId] invoking getSourceStorageRecordsById
15:48:03.544 [vert.x-eventloop-thread-1] INFO LogUtil [117141579eqId] 192.168.183.117:46686 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 18 tid=diku OK
14:40:19.936 [vert.x-eventloop-thread-1] INFO RestRouting [199477971eqId] invoking getSourceStorageRecordsById
14:40:19.955 [vert.x-eventloop-thread-1] INFO LogUtil [199477990eqId] 192.168.183.117:55192 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 19 tid=diku OK
11:13:05.636 [vert.x-eventloop-thread-1] INFO RestRouting [273443671eqId] invoking getSourceStorageRecordsById
11:13:05.654 [vert.x-eventloop-thread-1] INFO LogUtil [273443689eqId] 192.168.183.117:43090 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 17 tid=diku OK
11:13:31.164 [vert.x-eventloop-thread-1] INFO RestRouting [273469199eqId] invoking getSourceStorageRecordsById
11:13:31.177 [vert.x-eventloop-thread-1] INFO LogUtil [273469212eqId] 192.168.183.117:43090 GET /source-storage/records/16441fc5-774d-42a3-ab9d-c340b2406d3a null HTTP_1_1 404 67 12 tid=diku Not Found
11:13:41.999 [vert.x-eventloop-thread-1] INFO RestRouting [273480034eqId] invoking getSourceStorageRecordsById
11:13:42.012 [vert.x-eventloop-thread-1] INFO LogUtil [273480047eqId] 192.168.183.117:43090 GET /source-storage/records/779085e9-2515-43a6-ad3e-40cf7b149f16 null HTTP_1_1 404 67 13 tid=diku Not Found
11:14:05.020 [vert.x-eventloop-thread-1] INFO RestRouting [273503055eqId] invoking getSourceStorageRecordsById
11:14:05.038 [vert.x-eventloop-thread-1] INFO LogUtil [273503073eqId] 192.168.183.117:43090 GET /source-storage/records/28f1f30f-44bf-4527-8fd5-f6190dd1aca5 null HTTP_1_1 200 3127 18 tid=diku OK
11:14:30.027 [vert.x-eventloop-thread-1] INFO RestRouting [273528062eqId] invoking postSourceStorageSnapshots
11:14:30.038 [vert.x-eventloop-thread-1] INFO LogUtil [273528073eqId] 192.168.183.117:43090 POST /source-storage/snapshots HTTP_1_1 201 336 12 tid=diku Created
11:14:31.324 [vert.x-eventloop-thread-1] INFO RestRouting [273529359eqId] invoking putSourceStorageSnapshotsByJobExecutionId
11:14:31.335 [vert.x-eventloop-thread-1] INFO LogUtil [273529370eqId] 192.168.183.117:43090 PUT /source-storage/snapshots/bc300e53-48e4-4c5c-ae20-f5dfd7e20961 HTTP_1_1 200 346 12 tid=diku OK
11:14:37.089 [vert.x-eventloop-thread-1] INFO RestRouting [273535124eqId] invoking putSourceStorageSnapshotsByJobExecutionId
11:14:37.101 [vert.x-eventloop-thread-1] INFO LogUtil [273535136eqId] 192.168.183.117:43090 PUT /source-storage/snapshots/bc300e53-48e4-4c5c-ae20-f5dfd7e20961 HTTP_1_1 200 412 12 tid=diku OK
11:14:37.223 [vert.x-eventloop-thread-1] INFO RestRouting [273535258eqId] invoking putSourceStorageSnapshotsByJobExecutionId
11:14:37.230 [vert.x-eventloop-thread-1] INFO LogUtil [273535265eqId] 192.168.183.117:43090 PUT /source-storage/snapshots/bc300e53-48e4-4c5c-ae20-f5dfd7e20961 HTTP_1_1 200 412 7 tid=diku OK
11:14:37.323 [vert.x-eventloop-thread-1] INFO RestRouting [273535358eqId] invoking postSourceStorageBatchVerifiedRecords
11:14:37.330 [vert.x-eventloop-thread-1] INFO LogUtil [273535365eqId] 192.168.183.117:43090 POST /source-storage/batch/verified-records HTTP_1_1 200 48 6 tid=diku OK
11:14:37.782 [vert.x-eventloop-thread-1] INFO RestRouting [273535817eqId] invoking putSourceStorageSnapshotsByJobExecutionId
11:14:37.786 [vert.x-eventloop-thread-1] INFO LogUtil [273535821eqId] 192.168.183.117:43090 PUT /source-storage/snapshots/bc300e53-48e4-4c5c-ae20-f5dfd7e20961 HTTP_1_1 200 338 4 tid=diku OK
11:15:02.786 [vert.x-eventloop-thread-1] INFO RestRouting [273560821eqId] invoking getSourceStorageRecordsById
11:15:02.801 [vert.x-eventloop-thread-1] INFO LogUtil [273560836eqId] 192.168.183.117:43090 GET /source-storage/records/1fb04e26-5d0f-4b10-9b33-70c336372bee null HTTP_1_1 404 67 15 tid=diku Not Found
11:54:32.235 [vert.x-eventloop-thread-1] INFO RestRouting [275930270eqId] invoking getSourceStorageRecordsById
11:54:32.252 [vert.x-eventloop-thread-1] INFO LogUtil [275930287eqId] 192.168.183.117:32836 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 17 tid=diku OK
14:05:12.000 [vert.x-eventloop-thread-1] INFO RestRouting [283770035eqId] invoking getSourceStorageRecordsById
14:05:12.019 [vert.x-eventloop-thread-1] INFO LogUtil [283770054eqId] 192.168.183.117:60626 GET /source-storage/records/72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d null HTTP_1_1 200 3127 18 tid=diku OK
14:16:33.854 [vert.x-worker-thread-10] INFO KafkaInternalCache [284451889eqId] Clearing cache from outdated events...
16:25:43.733 [vert.x-eventloop-thread-1] INFO RestRouting [292201768eqId] invoking postSourceStorageSnapshots
16:25:43.745 [vert.x-eventloop-thread-1] INFO LogUtil [292201780eqId] 192.168.183.117:36572 POST /source-storage/snapshots HTTP_1_1 201 336 12 tid=diku Created
16:25:44.854 [vert.x-eventloop-thread-1] INFO RestRouting [292202889eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:25:44.866 [vert.x-eventloop-thread-1] INFO LogUtil [292202901eqId] 192.168.183.117:36572 PUT /source-storage/snapshots/810ea89c-9177-40e9-b6b1-54701b033264 HTTP_1_1 200 346 12 tid=diku OK
16:25:51.497 [vert.x-eventloop-thread-1] INFO RestRouting [292209532eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:25:51.510 [vert.x-eventloop-thread-1] INFO LogUtil [292209545eqId] 192.168.183.117:36572 PUT /source-storage/snapshots/810ea89c-9177-40e9-b6b1-54701b033264 HTTP_1_1 200 412 13 tid=diku OK
16:25:51.534 [vert.x-eventloop-thread-1] INFO RestRouting [292209569eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:25:51.546 [vert.x-eventloop-thread-1] INFO LogUtil [292209581eqId] 192.168.183.117:36572 PUT /source-storage/snapshots/810ea89c-9177-40e9-b6b1-54701b033264 HTTP_1_1 200 412 12 tid=diku OK
16:25:54.482 [vert.x-worker-thread-2] DEBUG KafkaConsumerWrapper [292212517eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) a Record has been received. key: 13 currentLoad: 1 globalLoad: 1
16:25:54.482 [vert.x-worker-thread-2] DEBUG rdChunksKafkaHandler [292212517eqId] RecordCollection has been received, correlationId: 56a7903b-07a1-40c4-bf23-0c2118fdab57, starting processing... chunkNumber 11-13
16:25:54.508 [vert.x-worker-thread-2] INFO AbstractConfig [292212543eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-33
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:25:54.509 [vert.x-worker-thread-2] INFO KafkaProducer [292212544eqId] [Producer clientId=producer-33] Instantiated an idempotent producer.
16:25:54.511 [vert.x-worker-thread-2] INFO KafkaProducer [292212546eqId] [Producer clientId=producer-33] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
16:25:54.512 [vert.x-worker-thread-2] INFO KafkaProducer [292212547eqId] [Producer clientId=producer-33] Overriding the default acks to all since idempotence is enabled.
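[Note on the ProducerConfig dumps: every producer in this log (producer-30 through producer-38) is created with enable.idempotence = true, and the pair of "Overriding ..." lines that follows each dump is the Kafka 2.5 client adjusting its own defaults: with idempotence on it raises retries to 2147483647 (Integer.MAX_VALUE) and forces acks=all. A minimal sketch of a producer configured the same way; the topic and payload are invented for illustration:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class IdempotentProducerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Same values as the dump above.
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092");
            props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            // This single flag produces the "Instantiated an idempotent producer" line
            // and makes the client override retries and acks on its own.
            props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");

            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Hypothetical topic and payload, for illustration only.
                producer.send(new ProducerRecord<>(
                        "metadata-spitfire.Default.diku.DI_COMPLETED", "key", "{...event payload...}"));
            } // closing here is what emits the "Closing the Kafka producer ..." line
        }
    }
]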
16:25:54.512 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292212547eqId] Kafka version: 2.5.0
16:25:54.551 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292212586eqId] Kafka commitId: 66563e712b0b9f84
16:25:54.551 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292212586eqId] Kafka startTimeMs: 1634574354512
16:25:54.551 [vert.x-worker-thread-2] DEBUG KafkaConsumerWrapper [292212586eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@2f7988d8
16:25:54.553 [kafka-producer-network-thread | producer-33] INFO Metadata [292212588eqId] [Producer clientId=producer-33] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:25:54.553 [kafka-producer-network-thread | producer-33] INFO TransactionManager [292212588eqId] [Producer clientId=producer-33] ProducerId set to 4080 with epoch 0
16:25:54.561 [vert.x-worker-thread-7] DEBUG rdChunksKafkaHandler [292212596eqId] RecordCollection processing has been completed with response sent... correlationId 56a7903b-07a1-40c4-bf23-0c2118fdab57, chunkNumber 11-11
16:25:54.650 [vert.x-worker-thread-7] DEBUG KafkaConsumerWrapper [292212685eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committing offset: 11
16:25:54.653 [vert.x-worker-thread-7] DEBUG KafkaConsumerWrapper [292212688eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -11
16:25:54.654 [vert.x-worker-thread-7] INFO KafkaProducer [292212689eqId] [Producer clientId=producer-33] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
16:25:54.678 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [292212713eqId] Consumer - id: 2 subscriptionPattern: SubscriptionDefinition(eventType=DI_SRS_MARC_BIB_RECORD_CREATED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_SRS_MARC_BIB_RECORD_CREATED) a Record has been received. key: 11 currentLoad: 1 globalLoad: 1
16:25:54.678 [vert.x-worker-thread-8] DEBUG taImportKafkaHandler [292212713eqId] Data import event payload has been received with event type: DI_SRS_MARC_BIB_RECORD_CREATED and correlationId: d6b764a1-76c1-4011-96d9-09ff23ac571a
16:25:54.679 [vert.x-worker-thread-8] DEBUG ProfileSnapshotCache [292212714eqId] Trying to load jobProfileSnapshot by id '67118dc0-24d6-4222-b5a1-bd5617cc8d1d' for cache, okapi url: http://okapi:9130, tenantId: diku
16:25:54.680 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [292212715eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@5d830be2
16:25:54.843 [vert.x-worker-thread-6] INFO ProfileSnapshotCache [292212878eqId] JobProfileSnapshot was loaded by id '67118dc0-24d6-4222-b5a1-bd5617cc8d1d'
16:25:54.844 [vert.x-worker-thread-6] INFO EventProcessorImpl [292212879eqId] No suitable handler found for DI_SRS_MARC_BIB_RECORD_CREATED event type and current profile ACTION_PROFILE
16:25:54.844 [vert.x-worker-thread-6] DEBUG KafkaConsumerWrapper [292212879eqId] Consumer - id: 2 subscriptionPattern: SubscriptionDefinition(eventType=DI_SRS_MARC_BIB_RECORD_CREATED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_SRS_MARC_BIB_RECORD_CREATED) Committing offset: 11
16:25:54.845 [vert.x-worker-thread-6] DEBUG KafkaConsumerWrapper [292212880eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -11
16:25:55.031 [vert.x-worker-thread-13] DEBUG KafkaConsumerWrapper [292213066eqId] Consumer - id: 11 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING) a Record has been received. key: 13 currentLoad: 1 globalLoad: 1
16:25:55.032 [vert.x-worker-thread-13] DEBUG taImportKafkaHandler [292213067eqId] Data import event payload has been received with event type: DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING and correlationId: d6b764a1-76c1-4011-96d9-09ff23ac571a
16:25:55.032 [vert.x-worker-thread-13] DEBUG ProfileSnapshotCache [292213067eqId] Trying to load jobProfileSnapshot by id '67118dc0-24d6-4222-b5a1-bd5617cc8d1d' for cache, okapi url: http://okapi:9130, tenantId: diku
16:25:55.033 [vert.x-worker-thread-13] DEBUG KafkaConsumerWrapper [292213068eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@5d830be2
16:25:55.047 [vert.x-worker-thread-18] INFO ProfileSnapshotCache [292213082eqId] JobProfileSnapshot was loaded by id '67118dc0-24d6-4222-b5a1-bd5617cc8d1d'
16:25:55.048 [vert.x-worker-thread-18] DEBUG ametersSnapshotCache [292213083eqId] Trying to load MappingParametersSnapshot by jobExecutionId '810ea89c-9177-40e9-b6b1-54701b033264' for cache, okapi url: http://okapi:9130, tenantId: diku
16:25:55.070 [vert.x-worker-thread-8] INFO ametersSnapshotCache [292213105eqId] MappingParametersSnapshot was loaded by jobExecutionId '810ea89c-9177-40e9-b6b1-54701b033264'
16:25:55.556 [vert.x-worker-thread-12] INFO KafkaConsumerWrapper [292213591eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committed offset: 11
16:25:55.682 [vert.x-worker-thread-7] INFO KafkaConsumerWrapper [292213717eqId] Consumer - id: 2 subscriptionPattern: SubscriptionDefinition(eventType=DI_SRS_MARC_BIB_RECORD_CREATED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_SRS_MARC_BIB_RECORD_CREATED) Committed offset: 11
16:25:55.931 [vert.x-worker-thread-19] INFO AbstractConfig [292213966eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-34
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:25:55.932 [vert.x-worker-thread-19] INFO KafkaProducer [292213967eqId] [Producer clientId=producer-34] Instantiated an idempotent producer.
16:25:55.933 [vert.x-worker-thread-19] INFO KafkaProducer [292213968eqId] [Producer clientId=producer-34] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
16:25:55.933 [vert.x-worker-thread-19] INFO KafkaProducer [292213968eqId] [Producer clientId=producer-34] Overriding the default acks to all since idempotence is enabled.
16:25:55.950 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213985eqId] Kafka version: 2.5.0
16:25:55.950 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213985eqId] Kafka commitId: 66563e712b0b9f84
16:25:55.950 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213985eqId] Kafka startTimeMs: 1634574355949
16:25:55.952 [vert.x-worker-thread-19] INFO AbstractConfig [292213987eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-35
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:25:55.952 [vert.x-worker-thread-19] INFO KafkaProducer [292213987eqId] [Producer clientId=producer-35] Instantiated an idempotent producer.
16:25:55.953 [vert.x-worker-thread-19] INFO KafkaProducer [292213988eqId] [Producer clientId=producer-35] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
16:25:55.953 [vert.x-worker-thread-19] INFO KafkaProducer [292213988eqId] [Producer clientId=producer-35] Overriding the default acks to all since idempotence is enabled.
16:25:55.954 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213989eqId] Kafka version: 2.5.0
16:25:55.955 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213990eqId] Kafka commitId: 66563e712b0b9f84
16:25:55.955 [vert.x-worker-thread-19] INFO ppInfoParser$AppInfo [292213990eqId] Kafka startTimeMs: 1634574355954
16:25:55.956 [kafka-producer-network-thread | producer-34] INFO Metadata [292213991eqId] [Producer clientId=producer-34] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:25:55.956 [kafka-producer-network-thread | producer-34] INFO TransactionManager [292213991eqId] [Producer clientId=producer-34] ProducerId set to 4084 with epoch 0
16:25:55.958 [kafka-producer-network-thread | producer-35] INFO Metadata [292213993eqId] [Producer clientId=producer-35] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:25:55.958 [kafka-producer-network-thread | producer-35] INFO TransactionManager [292213993eqId] [Producer clientId=producer-35] ProducerId set to 4085 with epoch 0
16:25:56.056 [vert.x-worker-thread-19] INFO EventHandlingUtil [292214091eqId] Event with type DI_SRS_MARC_BIB_INSTANCE_HRID_SET and correlationId d6b764a1-76c1-4011-96d9-09ff23ac571a was sent to kafka
16:25:56.057 [vert.x-worker-thread-19] INFO KafkaProducer [292214092eqId] [Producer clientId=producer-34] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
16:25:56.151 [vert.x-worker-thread-19] INFO KafkaEventPublisher [292214186eqId] Event with type: DI_COMPLETED and correlationId: d6b764a1-76c1-4011-96d9-09ff23ac571a was sent to the topic metadata-spitfire.Default.diku.DI_COMPLETED
16:25:56.151 [vert.x-worker-thread-19] DEBUG KafkaConsumerWrapper [292214186eqId] Consumer - id: 11 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING) Committing offset: 9
16:25:56.151 [vert.x-worker-thread-19] DEBUG KafkaConsumerWrapper [292214186eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -9
16:25:56.151 [vert.x-worker-thread-19] INFO KafkaProducer [292214186eqId] [Producer clientId=producer-35] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
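[Note on the "Threshold is exceeded, preparing to resume, globalLoad: ..., currentLoad: ..., requestNo: ..." DEBUG lines: they come from the wrapper's org.folio.kafka.GlobalLoadSensor bookkeeping, whose internals are not shown in this log. The sketch below only illustrates the generic Kafka mechanism such load tracking usually drives, pause()/resume() on the assigned partitions; the thresholds are invented, not folio's:

    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public class BackpressureSketch {
        // Generic sketch; PAUSE_AT/RESUME_AT are hypothetical values.
        static <K, V> void applyBackpressure(KafkaConsumer<K, V> consumer, int inFlight) {
            final int PAUSE_AT = 20;  // stop fetching above this many in-flight records
            final int RESUME_AT = 5;  // start fetching again below this

            if (inFlight >= PAUSE_AT) {
                // poll() keeps group membership alive but returns no records
                // from paused partitions.
                consumer.pause(consumer.assignment());
            } else if (inFlight <= RESUME_AT) {
                consumer.resume(consumer.paused());
            }
        }
    }
]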
16:25:56.186 [vert.x-eventloop-thread-1] INFO RestRouting [292214221eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:25:56.191 [vert.x-eventloop-thread-1] INFO LogUtil [292214226eqId] 192.168.183.117:36572 PUT /source-storage/snapshots/810ea89c-9177-40e9-b6b1-54701b033264 HTTP_1_1 200 342 5 tid=diku OK
16:25:57.058 [vert.x-worker-thread-16] INFO KafkaConsumerWrapper [292215093eqId] Consumer - id: 11 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_INSTANCE_CREATED_READY_FOR_POST_PROCESSING) Committed offset: 9
16:26:10.834 [vert.x-eventloop-thread-1] INFO RestRouting [292228869eqId] invoking getSourceStorageRecordsById
16:26:10.844 [vert.x-eventloop-thread-1] INFO LogUtil [292228879eqId] 192.168.183.117:36572 GET /source-storage/records/594fa7a6-7c01-4640-9b63-a5eda2d781c5 null HTTP_1_1 200 8841 10 tid=diku OK
16:34:56.634 [vert.x-eventloop-thread-1] INFO RestRouting [292754669eqId] invoking postSourceStorageSnapshots
16:34:56.645 [vert.x-eventloop-thread-1] INFO LogUtil [292754680eqId] 192.168.183.117:40652 POST /source-storage/snapshots HTTP_1_1 201 336 11 tid=diku Created
16:34:57.929 [vert.x-eventloop-thread-1] INFO RestRouting [292755964eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:34:57.940 [vert.x-eventloop-thread-1] INFO LogUtil [292755975eqId] 192.168.183.117:40652 PUT /source-storage/snapshots/8303f7ea-c811-4247-886f-5f48188eb52d HTTP_1_1 200 346 11 tid=diku OK
16:35:02.898 [vert.x-eventloop-thread-1] INFO RestRouting [292760933eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:35:02.911 [vert.x-eventloop-thread-1] INFO LogUtil [292760946eqId] 192.168.183.117:40652 PUT /source-storage/snapshots/8303f7ea-c811-4247-886f-5f48188eb52d HTTP_1_1 200 412 13 tid=diku OK
16:35:03.024 [vert.x-eventloop-thread-1] INFO RestRouting [292761059eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:35:03.036 [vert.x-eventloop-thread-1] INFO LogUtil [292761071eqId] 192.168.183.117:40652 PUT /source-storage/snapshots/8303f7ea-c811-4247-886f-5f48188eb52d HTTP_1_1 200 412 12 tid=diku OK
16:35:03.089 [vert.x-eventloop-thread-1] INFO RestRouting [292761124eqId] invoking postSourceStorageBatchVerifiedRecords
16:35:03.102 [vert.x-eventloop-thread-1] INFO LogUtil [292761137eqId] 192.168.183.117:40652 POST /source-storage/batch/verified-records HTTP_1_1 200 31 13 tid=diku OK
16:35:05.979 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [292764014eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) a Record has been received. key: 14 currentLoad: 1 globalLoad: 1
16:35:05.979 [vert.x-worker-thread-8] DEBUG rdChunksKafkaHandler [292764014eqId] RecordCollection has been received, correlationId: a24682f7-7e99-4fbe-975a-6ba98052bcba, starting processing... chunkNumber 12-14
16:35:05.998 [vert.x-worker-thread-8] INFO AbstractConfig [292764033eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-36
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:35:05.999 [vert.x-worker-thread-8] INFO KafkaProducer [292764034eqId] [Producer clientId=producer-36] Instantiated an idempotent producer.
16:35:06.001 [vert.x-worker-thread-8] INFO KafkaProducer [292764036eqId] [Producer clientId=producer-36] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
16:35:06.001 [vert.x-worker-thread-8] INFO KafkaProducer [292764036eqId] [Producer clientId=producer-36] Overriding the default acks to all since idempotence is enabled.
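[Note on offsets: each "Committing offset: N" DEBUG line is matched later by a "Committed offset: N" confirmation (offset 9 just above, offsets 10-12 elsewhere in this log), i.e. the commit is scheduled first and acknowledged later. That two-step shape matches Kafka's asynchronous commit API; whether the wrapper uses exactly this call is an assumption. A sketch (Kafka stores the position of the next record to read, hence the +1):

    import java.util.Collections;
    import java.util.Map;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.clients.consumer.OffsetAndMetadata;
    import org.apache.kafka.common.TopicPartition;

    public class OffsetCommitSketch {
        // Hypothetical helper mirroring the Committing/Committed log pair.
        static void commitRecordOffset(KafkaConsumer<String, String> consumer,
                                       TopicPartition tp, long recordOffset) {
            Map<TopicPartition, OffsetAndMetadata> toCommit =
                    Collections.singletonMap(tp, new OffsetAndMetadata(recordOffset + 1));
            System.out.println("Committing offset: " + recordOffset);
            consumer.commitAsync(toCommit, (offsets, exception) -> {
                if (exception == null) {
                    System.out.println("Committed offset: " + recordOffset);
                } else {
                    exception.printStackTrace(); // a real handler would retry or log properly
                }
            });
        }
    }
]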
16:35:06.002 [vert.x-worker-thread-8] INFO ppInfoParser$AppInfo [292764037eqId] Kafka version: 2.5.0
16:35:06.002 [vert.x-worker-thread-8] INFO ppInfoParser$AppInfo [292764037eqId] Kafka commitId: 66563e712b0b9f84
16:35:06.002 [vert.x-worker-thread-8] INFO ppInfoParser$AppInfo [292764037eqId] Kafka startTimeMs: 1634574906002
16:35:06.003 [vert.x-worker-thread-8] DEBUG KafkaConsumerWrapper [292764038eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@2f7988d8
16:35:06.006 [kafka-producer-network-thread | producer-36] INFO Metadata [292764041eqId] [Producer clientId=producer-36] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:35:06.007 [kafka-producer-network-thread | producer-36] INFO TransactionManager [292764042eqId] [Producer clientId=producer-36] ProducerId set to 4089 with epoch 0
16:35:06.050 [vert.x-worker-thread-6] DEBUG rdChunksKafkaHandler [292764085eqId] RecordCollection processing has been completed with response sent... correlationId a24682f7-7e99-4fbe-975a-6ba98052bcba, chunkNumber 12-12
16:35:06.050 [vert.x-worker-thread-6] DEBUG KafkaConsumerWrapper [292764085eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committing offset: 12
16:35:06.050 [vert.x-worker-thread-6] DEBUG KafkaConsumerWrapper [292764085eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -12
16:35:06.051 [vert.x-worker-thread-6] INFO KafkaProducer [292764086eqId] [Producer clientId=producer-36] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
16:35:06.616 [vert.x-worker-thread-14] DEBUG KafkaConsumerWrapper [292764651eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) a Record has been received. key: 14 currentLoad: 1 globalLoad: 1
16:35:06.616 [vert.x-worker-thread-14] DEBUG taImportKafkaHandler [292764651eqId] Data import event payload has been received with event type: DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING and correlationId: 092189d6-856c-453a-a7e8-6573f12a0fc4
16:35:06.617 [vert.x-worker-thread-14] DEBUG ProfileSnapshotCache [292764652eqId] Trying to load jobProfileSnapshot by id 'bfafdedb-7438-45f6-82bc-1558adb676dd' for cache, okapi url: http://okapi:9130, tenantId: diku
16:35:06.618 [vert.x-worker-thread-14] DEBUG KafkaConsumerWrapper [292764653eqId] Starting business completion handler, globalLoadSensor: org.folio.kafka.GlobalLoadSensor@5d830be2
16:35:06.638 [vert.x-worker-thread-10] INFO ProfileSnapshotCache [292764673eqId] JobProfileSnapshot was loaded by id 'bfafdedb-7438-45f6-82bc-1558adb676dd'
16:35:06.639 [vert.x-worker-thread-10] DEBUG ametersSnapshotCache [292764674eqId] Trying to load MappingParametersSnapshot by jobExecutionId '8303f7ea-c811-4247-886f-5f48188eb52d' for cache, okapi url: http://okapi:9130, tenantId: diku
16:35:06.656 [vert.x-worker-thread-13] INFO ametersSnapshotCache [292764691eqId] MappingParametersSnapshot was loaded by jobExecutionId '8303f7ea-c811-4247-886f-5f48188eb52d'
16:35:07.013 [vert.x-worker-thread-8] INFO KafkaConsumerWrapper [292765048eqId] Consumer - id: 0 subscriptionPattern: SubscriptionDefinition(eventType=DI_RAW_RECORDS_CHUNK_PARSED, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_RAW_RECORDS_CHUNK_PARSED) Committed offset: 12
16:35:07.410 [vert.x-worker-thread-2] INFO AbstractConfig [292765445eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-37
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:35:07.411 [vert.x-worker-thread-2] INFO KafkaProducer [292765446eqId] [Producer clientId=producer-37] Instantiated an idempotent producer.
16:35:07.413 [vert.x-worker-thread-2] INFO KafkaProducer [292765448eqId] [Producer clientId=producer-37] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
16:35:07.413 [vert.x-worker-thread-2] INFO KafkaProducer [292765448eqId] [Producer clientId=producer-37] Overriding the default acks to all since idempotence is enabled.
16:35:07.414 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765449eqId] Kafka version: 2.5.0
16:35:07.414 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765449eqId] Kafka commitId: 66563e712b0b9f84
16:35:07.414 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765449eqId] Kafka startTimeMs: 1634574907413
16:35:07.415 [vert.x-worker-thread-2] INFO AbstractConfig [292765450eqId] ProducerConfig values:
    acks = -1
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.dns.lookup = default
    client.id = producer-38
    compression.type = gzip
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 0
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 5
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.2
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.StringSerializer
16:35:07.416 [vert.x-worker-thread-2] INFO KafkaProducer [292765451eqId] [Producer clientId=producer-38] Instantiated an idempotent producer.
16:35:07.449 [vert.x-worker-thread-2] INFO KafkaProducer [292765484eqId] [Producer clientId=producer-38] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled.
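[Note on the snapshot caches: ProfileSnapshotCache and the (name-truncated) mapping-parameters snapshot cache both log a miss ("Trying to load ... for cache, okapi url: http://okapi:9130") followed by a fill ("... was loaded by id ..."), i.e. a read-through cache keyed by id and populated over HTTP from Okapi. A minimal single-flight sketch of that shape; the loader is a stand-in, not the module's actual Okapi client:

    import java.util.Map;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Read-through async cache: one load per key, concurrent callers share it.
    public class SnapshotCacheSketch<T> {
        private final Map<String, CompletableFuture<T>> cache = new ConcurrentHashMap<>();
        private final Function<String, CompletableFuture<T>> loader;

        public SnapshotCacheSketch(Function<String, CompletableFuture<T>> loader) {
            this.loader = loader; // e.g. a GET against the okapi url seen in the log
        }

        public CompletableFuture<T> get(String id) {
            return cache.computeIfAbsent(id, key -> {
                System.out.println("Trying to load snapshot by id '" + key + "' for cache");
                return loader.apply(key).whenComplete((value, err) -> {
                    if (err != null) {
                        cache.remove(key); // drop failed loads so they can be retried
                    } else {
                        System.out.println("Snapshot was loaded by id '" + key + "'");
                    }
                });
            });
        }
    }
]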
16:35:07.450 [vert.x-worker-thread-2] INFO KafkaProducer [292765485eqId] [Producer clientId=producer-38] Overriding the default acks to all since idempotence is enabled.
16:35:07.450 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765485eqId] Kafka version: 2.5.0
16:35:07.450 [kafka-producer-network-thread | producer-37] INFO Metadata [292765485eqId] [Producer clientId=producer-37] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:35:07.451 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765486eqId] Kafka commitId: 66563e712b0b9f84
16:35:07.451 [vert.x-worker-thread-2] INFO ppInfoParser$AppInfo [292765486eqId] Kafka startTimeMs: 1634574907450
16:35:07.451 [kafka-producer-network-thread | producer-37] INFO TransactionManager [292765486eqId] [Producer clientId=producer-37] ProducerId set to 4093 with epoch 0
16:35:07.455 [kafka-producer-network-thread | producer-38] INFO Metadata [292765490eqId] [Producer clientId=producer-38] Cluster ID: ndvmVewOTKSTf8qW5if9eg
16:35:07.455 [kafka-producer-network-thread | producer-38] INFO TransactionManager [292765490eqId] [Producer clientId=producer-38] ProducerId set to 4094 with epoch 0
16:35:07.550 [vert.x-worker-thread-2] INFO EventHandlingUtil [292765585eqId] Event with type DI_SRS_MARC_HOLDINGS_HOLDING_HRID_SET and correlationId 092189d6-856c-453a-a7e8-6573f12a0fc4 was sent to kafka
16:35:07.550 [vert.x-worker-thread-2] INFO KafkaProducer [292765585eqId] [Producer clientId=producer-37] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
16:35:07.554 [vert.x-worker-thread-10] INFO KafkaEventPublisher [292765589eqId] Event with type: DI_COMPLETED and correlationId: 092189d6-856c-453a-a7e8-6573f12a0fc4 was sent to the topic metadata-spitfire.Default.diku.DI_COMPLETED
16:35:07.554 [vert.x-worker-thread-10] DEBUG KafkaConsumerWrapper [292765589eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) Committing offset: 3
16:35:07.555 [vert.x-worker-thread-10] DEBUG KafkaConsumerWrapper [292765590eqId] Threshold is exceeded, preparing to resume, globalLoad: 0, currentLoad: 0, requestNo: -3
16:35:07.555 [vert.x-worker-thread-10] INFO KafkaProducer [292765590eqId] [Producer clientId=producer-38] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
16:35:07.651 [vert.x-eventloop-thread-1] INFO RestRouting [292765686eqId] invoking putSourceStorageSnapshotsByJobExecutionId
16:35:07.651 [vert.x-worker-thread-8] INFO KafkaConsumerWrapper [292765686eqId] Consumer - id: 14 subscriptionPattern: SubscriptionDefinition(eventType=DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING, subscriptionPattern=metadata-spitfire\.Default\.\w{1,}\.DI_INVENTORY_HOLDINGS_CREATED_READY_FOR_POST_PROCESSING) Committed offset: 3
16:35:07.655 [vert.x-eventloop-thread-1] INFO LogUtil [292765690eqId] 192.168.183.117:40652 PUT /source-storage/snapshots/8303f7ea-c811-4247-886f-5f48188eb52d HTTP_1_1 200 342 5 tid=diku OK
16:36:01.057 [vert.x-eventloop-thread-1] INFO RestRouting [292819092eqId] invoking getSourceStorageRecordsById
16:36:01.076 [vert.x-eventloop-thread-1] INFO LogUtil [292819111eqId] 192.168.183.117:40652 GET /source-storage/records/a6f9e906-4825-49d7-a6cd-52c3e69db49d null HTTP_1_1 200 3127 18 tid=diku OK
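[Note on reading the LogUtil access lines: the fields appear to be client address:port, method, path, query string (null), protocol, status code, response size in bytes, elapsed milliseconds, then the tenant (tid=diku) and status text, so the final GET above returned a 3127-byte record in 18 ms. The same endpoint can be exercised directly with any HTTP client; a sketch using the JDK HTTP client, where the Okapi token value is a placeholder:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class GetSourceStorageRecordSketch {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://okapi:9130/source-storage/records/"
                            + "72f817ce-3ff9-47d4-86ed-3cfdf8df6f1d")) // record id taken from the log
                    .header("X-Okapi-Tenant", "diku")   // tenant seen as tid=diku above
                    .header("X-Okapi-Token", "<token>") // placeholder credential
                    .header("Accept", "application/json")
                    .GET()
                    .build();
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
            // Expect 200 with the record JSON, or 404 "Not Found" for absent ids,
            // matching the LogUtil lines in this log.
            System.out.println(response.statusCode());
        }
    }
]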