package io.hstream.impl

import com.google.common.util.concurrent.AbstractService
import com.google.protobuf.InvalidProtocolBufferException
import io.hstream.HStreamDBClientException
import io.hstream.ReceivedRecord
import io.hstream.Record
import io.hstream.StreamShardOffset
import io.hstream.StreamShardReader
import io.hstream.StreamShardReaderReceiver
import io.hstream.internal.LookupShardReaderRequest
import io.hstream.internal.ReadShardStreamRequest
import io.hstream.internal.ReadShardStreamResponse
import io.hstream.util.GrpcUtils
import io.hstream.util.RecordUtils
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.cancel
import kotlinx.coroutines.launch
import org.slf4j.LoggerFactory
import java.time.Instant
import java.util.UUID
import java.util.concurrent.Executors

/**
 * Reads records from a single shard of a stream and delivers each decoded
 * [ReceivedRecord] to the supplied [StreamShardReaderReceiver].
 */
class StreamShardReaderKtImpl(
    private val client: HStreamClientKtImpl,
    private val streamName: String,
    private val shardId: Long,
    private val shardOffset: StreamShardOffset,
    private val receiver: StreamShardReaderReceiver,
) : AbstractService(), StreamShardReader {
    private val readerScope = CoroutineScope(Dispatchers.IO)
    private val readerName: String = UUID.randomUUID().toString()
    private val executorService = Executors.newSingleThreadExecutor()

    override fun doStart() {
        Thread {
            try {
                logger.info("streamShardReader $readerName is starting")
                // Look up the server node responsible for this reader, then open
                // a streaming read against that node.
                val lookupShardReaderRequest = LookupShardReaderRequest.newBuilder()
                    .setReaderId(readerName).build()
                val lookupShardReaderResp = client.unaryCallBlocked { it.lookupShardReader(lookupShardReaderRequest) }
                val serverUrl = lookupShardReaderResp.serverNode.host + ":" + lookupShardReaderResp.serverNode.port
                val respFlow = client.getCoroutineStub(serverUrl).readShardStream(
                    ReadShardStreamRequest.newBuilder().setReaderId(readerName).setShardId(shardId)
                        .setTimeout(1000)
                        .setShardOffset(GrpcUtils.streamShardOffsetToGrpc(shardOffset)).build()
                )
                notifyStarted()
                // Collect streamed responses on the IO dispatcher for the lifetime of the reader.
                readerScope.launch {
                    respFlow.collect {
                        process(it)
                    }
                }
            } catch (e: Exception) {
                logger.error("streamShardReader $readerName failed to start", e)
                notifyFailed(HStreamDBClientException(e))
            }
        }.start()
    }

    override fun doStop() {
        Thread {
            // Stop collecting from the server stream and stop dispatching records.
            readerScope.cancel()
            executorService.shutdownNow()
            notifyStopped()
        }.start()
    }

    private fun process(value: ReadShardStreamResponse) {
        if (!isRunning) {
            return
        }

        for (receivedRecord in value.receivedRecordsList) {
            // A received record may carry a compressed batch; decompress it into
            // individual HStream records before delivery.
            val receivedHStreamRecords = RecordUtils.decompress(receivedRecord)
            val createdTimestamp = receivedRecord.record.publishTime
            val createdTime = Instant.ofEpochSecond(createdTimestamp.seconds, createdTimestamp.nanos.toLong())
            for (receivedHStreamRecord in receivedHStreamRecords) {
                // Hand each record to the receiver on the single-threaded executor,
                // preserving delivery order.
                executorService.submit {
                    if (!isRunning) {
                        return@submit
                    }

                    try {
                        receiver.process(toReceivedRecord(receivedHStreamRecord, createdTime))
                    } catch (e: Exception) {
                        notifyFailed(e)
                    }
                }
            }
        }
    }

    companion object {
        private val logger = LoggerFactory.getLogger(StreamShardReaderKtImpl::class.java)

        private fun toReceivedRecord(receivedHStreamRecord: ReceivedHStreamRecord, createdTime: Instant): ReceivedRecord {
            return try {
                val header = RecordUtils.parseRecordHeaderFromHStreamRecord(receivedHStreamRecord.record)
                if (RecordUtils.isRawRecord(receivedHStreamRecord.record)) {
                    val rawRecord = RecordUtils.parseRawRecordFromHStreamRecord(receivedHStreamRecord.record)
                    ReceivedRecord(
                        GrpcUtils.recordIdFromGrpc(receivedHStreamRecord.recordId),
                        Record.newBuilder().partitionKey(header.partitionKey).rawRecord(rawRecord).build(),
                        createdTime
                    )
                } else {
                    val hRecord = RecordUtils.parseHRecordFromHStreamRecord(receivedHStreamRecord.record)
                    ReceivedRecord(
                        GrpcUtils.recordIdFromGrpc(receivedHStreamRecord.recordId),
                        Record.newBuilder().partitionKey(header.partitionKey).hRecord(hRecord).build(),
                        createdTime
                    )
                }
            } catch (e: InvalidProtocolBufferException) {
                throw HStreamDBClientException.InvalidRecordException("parse HStreamRecord error", e)
            }
        }
    }
}
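
// Usage sketch (illustration only, not part of the reader implementation above):
// a minimal example of driving StreamShardReaderKtImpl through its Guava Service
// lifecycle. The client instance, stream name, shard id, and offset are assumed
// to already exist; StreamShardReaderReceiver is assumed here to be a SAM
// interface whose single method is process(ReceivedRecord).
private fun streamShardReaderUsageSketch(
    client: HStreamClientKtImpl,
    offset: StreamShardOffset,
) {
    val reader = StreamShardReaderKtImpl(
        client = client,
        streamName = "demo-stream", // assumed stream name
        shardId = 0L,               // assumed shard id
        shardOffset = offset,
        receiver = StreamShardReaderReceiver { record -> println(record) },
    )
    reader.startAsync().awaitRunning()   // AbstractService (Guava) lifecycle
    // ... records are delivered to the receiver while the service is running ...
    reader.stopAsync().awaitTerminated()
}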