@@ -70,6 +70,7 @@ type topic struct {
 	Description string                   `json:"description"`
 	Partitions  []partition              `json:"partitions"`
 	Messages    map[string]messageConfig `json:"messages,omitempty"`
+	Bindings    bindings                 `json:"bindings,omitempty"`
 }
 
 type partition struct {
@@ -96,6 +97,16 @@ type messageConfig struct {
 	ContentType string `json:"contentType"`
 }
 
+type bindings struct {
+	Partitions            int   `json:"partitions,omitempty"`
+	RetentionBytes        int64 `json:"retentionBytes,omitempty"`
+	RetentionMs           int64 `json:"retentionMs,omitempty"`
+	SegmentBytes          int64 `json:"segmentBytes,omitempty"`
+	SegmentMs             int64 `json:"segmentMs,omitempty"`
+	ValueSchemaValidation bool  `json:"valueSchemaValidation,omitempty"`
+	KeySchemaValidation   bool  `json:"keySchemaValidation,omitempty"`
+}
+
 func getKafkaServices(store *runtime.KafkaStore, m *monitor.Monitor) []interface{} {
 	list := store.List()
 	result := make([]interface{}, 0, len(list))
@@ -188,7 +199,7 @@ func getKafka(info *runtime.KafkaInfo) kafka {
 			addr = name
 		}
 		t := info.Store.Topic(addr)
-		k.Topics = append(k.Topics, newTopic(info.Store, t, ch.Value, info.DefaultContentType))
+		k.Topics = append(k.Topics, newTopic(info.Store, t, ch.Value, info.Config))
 	}
 	sort.Slice(k.Topics, func(i, j int) bool {
 		return strings.Compare(k.Topics[i].Name, k.Topics[j].Name) < 0
@@ -206,7 +217,7 @@ func getKafka(info *runtime.KafkaInfo) kafka {
 	return k
 }
 
-func newTopic(s *store.Store, t *store.Topic, config *asyncapi3.Channel, defaultContentType string) topic {
+func newTopic(s *store.Store, t *store.Topic, ch *asyncapi3.Channel, cfg *asyncapi3.Config) topic {
 	var partitions []partition
 	for _, p := range t.Partitions {
 		partitions = append(partitions, newPartition(s, p))
@@ -217,11 +228,20 @@ func newTopic(s *store.Store, t *store.Topic, config *asyncapi3.Channel, defaultContentType string) topic {
 
 	result := topic{
 		Name:        t.Name,
-		Description: config.Description,
+		Description: ch.Description,
 		Partitions:  partitions,
+		Bindings: bindings{
+			Partitions:            t.Config.Bindings.Kafka.Partitions,
+			RetentionBytes:        t.Config.Bindings.Kafka.RetentionBytes,
+			RetentionMs:           t.Config.Bindings.Kafka.RetentionMs,
+			SegmentBytes:          t.Config.Bindings.Kafka.SegmentBytes,
+			SegmentMs:             t.Config.Bindings.Kafka.SegmentMs,
+			ValueSchemaValidation: t.Config.Bindings.Kafka.ValueSchemaValidation,
+			KeySchemaValidation:   t.Config.Bindings.Kafka.KeySchemaValidation,
+		},
 	}
 
-	for messageId, ref := range config.Messages {
+	for messageId, ref := range ch.Messages {
 		if ref.Value == nil {
 			continue
 		}
@@ -243,7 +263,7 @@ func newTopic(s *store.Store, t *store.Topic, config *asyncapi3.Channel, defaultContentType string) topic {
 		}
 
 		if m.ContentType == "" {
-			m.ContentType = defaultContentType
+			m.ContentType = cfg.DefaultContentType
 		}
 
 		if msg.Bindings.Kafka.Key != nil {
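
Note: since every field of the new bindings struct is tagged with omitempty, zero-valued settings are dropped from the serialized topic. A minimal standalone sketch (not part of this change; the struct is copied from the hunk above, the values are arbitrary) illustrating that behavior:

package main

import (
	"encoding/json"
	"fmt"
)

type bindings struct {
	Partitions            int   `json:"partitions,omitempty"`
	RetentionBytes        int64 `json:"retentionBytes,omitempty"`
	RetentionMs           int64 `json:"retentionMs,omitempty"`
	SegmentBytes          int64 `json:"segmentBytes,omitempty"`
	SegmentMs             int64 `json:"segmentMs,omitempty"`
	ValueSchemaValidation bool  `json:"valueSchemaValidation,omitempty"`
	KeySchemaValidation   bool  `json:"keySchemaValidation,omitempty"`
}

func main() {
	// Arbitrary example values; unset fields (RetentionBytes, SegmentBytes, ...) are omitted from the output.
	b := bindings{Partitions: 3, RetentionMs: 86400000, ValueSchemaValidation: true}
	out, _ := json.Marshal(b)
	fmt.Println(string(out)) // {"partitions":3,"retentionMs":86400000,"valueSchemaValidation":true}
}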