diff --git a/api/handler.go b/api/handler.go
index 7197b1d2b..37fc68db6 100644
--- a/api/handler.go
+++ b/api/handler.go
@@ -1,9 +1,9 @@
 package api

 import (
+  "bytes"
   "encoding/json"
   "fmt"
-  log "github.com/sirupsen/logrus"
   "io/fs"
   "mokapi/config/static"
   "mokapi/runtime"
@@ -15,6 +15,8 @@ import (
   "slices"
   "strconv"
   "strings"
+
+  log "github.com/sirupsen/logrus"
 )

 type handler struct {
@@ -220,12 +222,18 @@ func (h *handler) getInfo(w http.ResponseWriter, _ *http.Request) {
   writeJsonBody(w, i)
 }

-func writeJsonBody(w http.ResponseWriter, i interface{}) {
-  b, err := json.Marshal(i)
+func writeJsonBody(w http.ResponseWriter, v interface{}) {
+  var buf bytes.Buffer
+  enc := json.NewEncoder(&buf)
+  enc.SetEscapeHTML(false)
+  err := enc.Encode(v) // includes newline
+
   if err != nil {
     writeError(w, err, http.StatusInternalServerError)
     return
   }
+
+  b := bytes.TrimSuffix(buf.Bytes(), []byte("\n"))
   _, err = w.Write(b)
   if err != nil {
     log.Errorf("write response body failed: %v", err)
diff --git a/api/handler_config_test.go b/api/handler_config_test.go
index dffa6982a..d7cd2ceae 100644
--- a/api/handler_config_test.go
+++ b/api/handler_config_test.go
@@ -248,7 +248,7 @@ func TestHandler_Config(t *testing.T) {
       test: []try.ResponseCondition{
         try.HasStatusCode(http.StatusOK),
         try.HasHeader("Content-Type", "application/json"),
-        try.HasBody(`{"id":"61373430-3061-3131-6663-326332386638","url":"https://git.bar?file=/foo/foo.json\u0026ref=main","provider":"git","time":"2023-12-27T13:01:30Z"}`),
+        try.HasBody(`{"id":"61373430-3061-3131-6663-326332386638","url":"https://git.bar?file=/foo/foo.json&ref=main","provider":"git","time":"2023-12-27T13:01:30Z"}`),
       },
     },
     {
diff --git a/api/handler_events_test.go b/api/handler_events_test.go
index a37159377..9eb99196e 100644
--- a/api/handler_events_test.go
+++ b/api/handler_events_test.go
@@ -5,6 +5,7 @@ import (
   "encoding/json"
   "fmt"
   "mokapi/config/static"
+  "mokapi/providers/asyncapi3/kafka/store"
   "mokapi/providers/openapi"
   "mokapi/runtime"
   "mokapi/runtime/events"
@@ -208,3 +209,127 @@ func TestHandler_Events(t *testing.T) {
     })
   }
 }
+
+func TestHandler_KafkaEvents(t *testing.T) {
+  testcases := []struct {
+    name string
+    fn   func(t *testing.T, h http.Handler, sm *events.StoreManager)
+  }{
+    {
+      name: "empty kafka events",
+      fn: func(t *testing.T, h http.Handler, sm *events.StoreManager) {
+        try.Handler(t,
+          http.MethodGet,
+          "http://foo.api/api/events?namespace=kafka",
+          nil,
+          "",
+          h,
+          try.HasStatusCode(200),
+          try.HasHeader("Content-Type", "application/json"),
+          try.HasBody(`[]`))
+      },
+    },
+    {
+      name: "with kafka events",
+      fn: func(t *testing.T, h http.Handler, sm *events.StoreManager) {
+        sm.SetStore(1, events.NewTraits().WithNamespace("kafka"))
+        err := sm.Push(&eventstest.Event{Name: "foo"}, events.NewTraits().WithNamespace("kafka"))
+        event := sm.GetEvents(events.NewTraits())[0]
+        require.NoError(t, err)
+        try.Handler(t,
+          http.MethodGet,
+          "http://foo.api/api/events?namespace=kafka",
+          nil,
+          "",
+          h,
+          try.HasStatusCode(200),
+          try.HasHeader("Content-Type", "application/json"),
+          try.HasBody(fmt.Sprintf(`[{"id":"%v","traits":{"namespace":"kafka"},"data":{"Name":"foo","api":""},"time":"%v"}]`,
+            event.Id,
+            event.Time.Format(time.RFC3339Nano))))
+      },
+    },
+    {
+      name: "get specific event",
+      fn: func(t *testing.T, h http.Handler, sm *events.StoreManager) {
+        sm.SetStore(1, events.NewTraits().WithNamespace("kafka"))
+        err := sm.Push(&eventstest.Event{Name: "foo"}, events.NewTraits().WithNamespace("kafka"))
+        event := sm.GetEvents(events.NewTraits())[0]
+        require.NoError(t, err)
+        try.Handler(t,
+          http.MethodGet,
+          "http://foo.api/api/events/"+event.Id,
+          nil,
+          "",
+          h,
+          try.HasStatusCode(200),
+          try.HasHeader("Content-Type", "application/json"),
+          try.HasBody(fmt.Sprintf(`{"id":"%v","traits":{"namespace":"kafka"},"data":{"Name":"foo","api":""},"time":"%v"}`,
+            event.Id,
+            event.Time.Format(time.RFC3339Nano))))
+      },
+    },
+    {
+      name: "get kafka with producerId",
+      fn: func(t *testing.T, h http.Handler, sm *events.StoreManager) {
+        sm.SetStore(1, events.NewTraits().WithNamespace("kafka"))
+
+        err := sm.Push(&store.KafkaLog{
+          Offset:         123,
+          Key:            store.LogValue{},
+          Message:        store.LogValue{},
+          MessageId:      "foo-1",
+          Partition:      1,
+          ProducerId:     3,
+          ProducerEpoch:  1,
+          SequenceNumber: 2,
+        }, events.NewTraits().WithNamespace("kafka"))
+        require.NoError(t, err)
+
+        try.Handler(t,
+          http.MethodGet,
+          "http://foo.api/api/events?namespace=kafka",
+          nil,
+          "",
+          h,
+          try.HasStatusCode(200),
+          try.AssertBody(func(t *testing.T, body string) {
+            var m []map[string]any
+            require.NoError(t, json.Unmarshal([]byte(body), &m))
+            require.Equal(t, map[string]any{
+              "api":     "",
+              "deleted": false,
+              "headers": interface{}(nil),
+              "key": map[string]interface{}{
+                "binary": interface{}(nil),
+                "value":  "",
+              },
+              "message": map[string]interface{}{
+                "binary": interface{}(nil),
+                "value":  "",
+              },
+              "messageId":      "foo-1",
+              "offset":         float64(123),
+              "partition":      float64(1),
+              "producerEpoch":  float64(1),
+              "producerId":     float64(3),
+              "schemaId":       float64(0),
+              "sequenceNumber": float64(2),
+            },
+              m[0]["data"])
+          }))
+      },
+    },
+  }
+
+  for _, tc := range testcases {
+    tc := tc
+    t.Run(tc.name, func(t *testing.T) {
+      cfg := &static.Config{}
+      app := runtime.New(cfg)
+
+      h := New(app, static.Api{})
+      tc.fn(t, h, app.Events)
+    })
+  }
+}
diff --git a/api/handler_kafka.go b/api/handler_kafka.go
index d4536abcb..55dd2b80a 100644
--- a/api/handler_kafka.go
+++ b/api/handler_kafka.go
@@ -120,11 +120,11 @@ type produceRequest struct {
   Records []store.Record `json:"records"`
 }

-type produceResponse struct {
-  Offsets []recordResult `json:"offsets"`
+type ProduceResponse struct {
+  Offsets []RecordResult `json:"offsets"`
 }

-type recordResult struct {
+type RecordResult struct {
   Partition int
   Offset    int64
   Error     string
@@ -222,9 +222,9 @@ func (h *handler) handleKafka(w http.ResponseWriter, r *http.Request) {
         writeError(w, err, http.StatusBadRequest)
       }
     }
-    res := produceResponse{}
+    res := ProduceResponse{}
     for _, rec := range result {
-      res.Offsets = append(res.Offsets, recordResult{
+      res.Offsets = append(res.Offsets, RecordResult{
         Partition: rec.Partition,
         Offset:    rec.Offset,
         Error:     rec.Error,
@@ -247,7 +247,7 @@ func (h *handler) handleKafka(w http.ResponseWriter, r *http.Request) {
       w.WriteHeader(http.StatusNotFound)
     } else {
       w.Header().Set("Content-Type", "application/json")
-      writeJsonBody(w, getPartitions(k, t))
+      writeJsonBody(w, getPartitions(t))
     }
     return
   // /api/services/kafka/{cluster}/topics/{topic}/partitions/{id}
@@ -276,7 +276,7 @@ func (h *handler) handleKafka(w http.ResponseWriter, r *http.Request) {
     }
     if r.Method == "GET" {
       w.Header().Set("Content-Type", "application/json")
-      writeJsonBody(w, newPartition(k.Store, p))
+      writeJsonBody(w, newPartition(p))
     } else {
       records, err := getProduceRecords(r)
       if err != nil {
@@ -296,9 +296,9 @@ func (h *handler) handleKafka(w http.ResponseWriter, r *http.Request) {
           writeError(w, err, http.StatusBadRequest)
         }
       }
-      res := produceResponse{}
+      res := ProduceResponse{}
       for _, rec := range result {
-        res.Offsets = append(res.Offsets, recordResult{
+        res.Offsets = append(res.Offsets, RecordResult{
          Partition: rec.Partition,
          Offset:    rec.Offset,
          Error:     rec.Error,
@@ -458,7 +458,7 @@ func getTopics(info *runtime.KafkaInfo) []topic {
      addr = name
    }
    t := info.Store.Topic(addr)
-    topics = append(topics, newTopic(info.Store, t, ch.Value, info.Config))
+    topics = append(topics, newTopic(t, ch.Value, info.Config))
  }
  sort.Slice(topics, func(i, j int) bool {
    return strings.Compare(topics[i].Name, topics[j].Name) < 0
@@ -477,7 +477,7 @@ func getTopic(info *runtime.KafkaInfo, name string) *topic {
    }
    if addr == name {
      t := info.Store.Topic(addr)
-      r := newTopic(info.Store, t, ch.Value, info.Config)
+      r := newTopic(t, ch.Value, info.Config)
      return &r
    }

@@ -485,10 +485,10 @@
  return nil
}

-func newTopic(s *store.Store, t *store.Topic, ch *asyncapi3.Channel, cfg *asyncapi3.Config) topic {
+func newTopic(t *store.Topic, ch *asyncapi3.Channel, cfg *asyncapi3.Config) topic {
  var partitions []partition
  for _, p := range t.Partitions {
-    partitions = append(partitions, newPartition(s, p))
+    partitions = append(partitions, newPartition(p))
  }
  sort.Slice(partitions, func(i, j int) bool {
    return partitions[i].Id < partitions[j].Id
@@ -549,10 +549,10 @@ func newTopic(s *store.Store, t *store.Topic, ch *asyncapi3.Channel, cfg *asynca
  return result
}

-func getPartitions(info *runtime.KafkaInfo, t *store.Topic) []partition {
+func getPartitions(t *store.Topic) []partition {
  var partitions []partition
  for _, p := range t.Partitions {
-    partitions = append(partitions, newPartition(info.Store, p))
+    partitions = append(partitions, newPartition(p))
  }
  sort.Slice(partitions, func(i, j int) bool {
    return partitions[i].Id < partitions[j].Id
@@ -594,13 +594,12 @@ func newGroup(g *store.Group) group {
  return grp
}

-func newPartition(s *store.Store, p *store.Partition) partition {
-  leader, _ := s.Broker(p.Leader)
+func newPartition(p *store.Partition) partition {
  return partition{
    Id:          p.Index,
    StartOffset: p.StartOffset(),
    Offset:      p.Offset(),
-    Leader:      newBroker(leader),
+    Leader:      newBroker(p.Leader),
    Segments:    len(p.Segments),
  }
}
@@ -649,7 +648,7 @@ func getProduceRecords(r *http.Request) ([]store.Record, error) {
  return pr.Records, nil
}

-func (r *recordResult) MarshalJSON() ([]byte, error) {
+func (r *RecordResult) MarshalJSON() ([]byte, error) {
  aux := &struct {
    Partition int   `json:"partition"`
    Offset    int64 `json:"offset"`
diff --git a/api/handler_kafka_test.go b/api/handler_kafka_test.go
index 15665595a..1bd2de160 100644
--- a/api/handler_kafka_test.go
+++ b/api/handler_kafka_test.go
@@ -1,6 +1,8 @@
-package api
+package api_test

 import (
+  "encoding/json"
+  "mokapi/api"
   "mokapi/config/dynamic"
   "mokapi/config/dynamic/dynamictest"
   "mokapi/config/static"
@@ -11,7 +13,7 @@ import (
   "mokapi/providers/asyncapi3/asyncapi3test"
   "mokapi/providers/asyncapi3/kafka/store"
   "mokapi/providers/openapi/openapitest"
-  schematest2 "mokapi/providers/openapi/schema/schematest"
+  openapi "mokapi/providers/openapi/schema/schematest"
   "mokapi/runtime"
   "mokapi/runtime/events/eventstest"
   "mokapi/runtime/monitor"
@@ -287,7 +289,7 @@ func TestHandler_Kafka(t *testing.T) {
          asyncapi3test.WithChannel("foo",
            asyncapi3test.WithChannelDescription("bar"),
            asyncapi3test.WithMessage("foo",
-              asyncapi3test.WithPayloadMulti("foo", schematest2.New("string")),
+              asyncapi3test.WithPayloadMulti("foo", openapi.New("string")),
              asyncapi3test.WithContentType("application/json"),
            ),
          ),
@@ -305,7 +307,7 @@ func TestHandler_Kafka(t *testing.T) {
    t.Run(tc.name, func(t *testing.T) {
      t.Parallel()

-      h := New(tc.app(), static.Api{})
+      h := api.New(tc.app(), static.Api{})

      try.Handler(t,
        http.MethodGet,
@@ -324,7 +326,7 @@ func TestHandler_KafkaAPI(t *testing.T) {
  testcases := []struct {
    name string
    app  func() *runtime.App
-    test func(t *testing.T, app *runtime.App, api http.Handler)
+    test func(t *testing.T, app *runtime.App, h http.Handler)
  }{
    {
      name: "get kafka topics but empty",
@@ -338,13 +340,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(200),
          try.HasHeader("Content-Type", "application/json"),
          try.HasBody(`[]`),
@@ -368,13 +370,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(200),
          try.HasHeader("Content-Type", "application/json"),
          try.HasBody(`[{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]`),
@@ -398,13 +400,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/topic-1",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(200),
          try.HasHeader("Content-Type", "application/json"),
          try.HasBody(`{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}`),
@@ -428,13 +430,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/foo",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(404),
        )
      },
@@ -443,20 +445,31 @@ func TestHandler_KafkaAPI(t *testing.T) {
      name: "produce kafka message into topic",
      app: func() *runtime.App {
        app := runtime.New(&static.Config{})
+
+        msg := asyncapi3test.NewMessage(
+          asyncapi3test.WithContentType("application/json"),
+          asyncapi3test.WithPayload(
+            schematest.New("string"),
+          ),
+        )
+        ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
        _, _ = app.Kafka.Add(&dynamic.Config{
          Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")},
          Data: asyncapi3test.NewConfig(
            asyncapi3test.WithInfo("foo", "bar", "1.0"),
            asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"),
-            asyncapi3test.WithChannel("topic-1",
-              asyncapi3test.WithChannelDescription("foobar"),
-              asyncapi3test.WithMessage("foo"),
+            asyncapi3test.AddChannel("topic-1", ch),
+ asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), ), ), }, enginetest.NewEngine()) return app }, - test: func(t *testing.T, app *runtime.App, api http.Handler) { + test: func(t *testing.T, app *runtime.App, h http.Handler) { try.Handler(t, http.MethodPost, "http://foo.api/api/services/kafka/foo/topics/topic-1", @@ -464,11 +477,17 @@ func TestHandler_KafkaAPI(t *testing.T) { `{ "records": [{"key": "foo", "value": "bar"}] }`, - api, + h, try.HasStatusCode(http.StatusOK), try.HasBody(`{"offsets":[{"partition":0,"offset":0}]}`), ) + s := app.Kafka.Get("foo").Store + b, errCode := s.Topic("topic-1").Partitions[0].Read(0, 100) + require.Equal(t, kafka.None, errCode) + require.Equal(t, "foo", string(kafka.Read(b.Records[0].Key))) + require.Equal(t, `"bar"`, string(kafka.Read(b.Records[0].Value))) + require.Equal(t, float64(1), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value()) }, }, @@ -489,7 +508,7 @@ func TestHandler_KafkaAPI(t *testing.T) { }, enginetest.NewEngine()) return app }, - test: func(t *testing.T, app *runtime.App, api http.Handler) { + test: func(t *testing.T, app *runtime.App, h http.Handler) { try.Handler(t, http.MethodPost, "http://foo.api/api/services/kafka/foo/topics/topic-1", @@ -497,7 +516,7 @@ func TestHandler_KafkaAPI(t *testing.T) { `{ "records": [{"key": "foo", "value": "YmFy"}] }`, - api, + h, try.HasStatusCode(http.StatusOK), try.HasBody(`{"offsets":[{"partition":0,"offset":0}]}`), ) @@ -515,23 +534,31 @@ func TestHandler_KafkaAPI(t *testing.T) { name: "produce invalid kafka message into topic", app: func() *runtime.App { app := runtime.New(&static.Config{}) + + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload( + schematest.New("string"), + ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + _, _ = app.Kafka.Add(&dynamic.Config{ Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")}, Data: asyncapi3test.NewConfig( asyncapi3test.WithInfo("foo", "bar", "1.0"), asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"), - asyncapi3test.WithChannel("topic-1", - asyncapi3test.WithChannelDescription("foobar"), - asyncapi3test.WithMessage("foo", - asyncapi3test.WithContentType("application/json"), - asyncapi3test.WithPayload(schematest.New("string")), - ), + asyncapi3test.AddChannel("topic-1", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), ), ), }, enginetest.NewEngine()) return app }, - test: func(t *testing.T, app *runtime.App, api http.Handler) { + test: func(t *testing.T, app *runtime.App, h http.Handler) { try.Handler(t, http.MethodPost, "http://foo.api/api/services/kafka/foo/topics/topic-1", @@ -539,9 +566,21 @@ func TestHandler_KafkaAPI(t *testing.T) { `{ "records": [{"key": "foo", "value": 123 }] }`, - api, + h, try.HasStatusCode(http.StatusOK), - try.HasBody(`{"offsets":[{"partition":-1,"offset":-1,"error":"validation error: invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number"}]}`), + try.AssertBody(func(t *testing.T, body string) { + var data api.ProduceResponse + _ = json.Unmarshal([]byte(body), &data) + require.Equal(t, api.ProduceResponse{ + Offsets: []api.RecordResult{ + { + Partition: -1, + Offset: -1, + Error: "no matching message 
+                  Error:     "no matching message configuration found for the given value: 123\nhint:\nencoding data to 'application/json' failed: error count 1:\n\t- #/type: invalid type, expected string but got number\n",
+                },
+              },
+            }, data)
+          }),
        )

        require.Equal(t, float64(0), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value())
@@ -564,13 +603,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(http.StatusOK),
          try.HasBody(`[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}]`),
        )
@@ -593,13 +632,13 @@ func TestHandler_KafkaAPI(t *testing.T) {
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(http.StatusOK),
          try.HasBody(`{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}`),
        )
@@ -609,71 +648,284 @@ func TestHandler_KafkaAPI(t *testing.T) {
      name: "produce kafka message into specific partition",
      app: func() *runtime.App {
        app := runtime.New(&static.Config{})
+
+        msg := asyncapi3test.NewMessage(
+          asyncapi3test.WithContentType("application/json"),
+          asyncapi3test.WithPayload(
+            schematest.New("object",
+              schematest.WithProperty("foo", schematest.New("string")),
+            ),
+          ),
+        )
+        ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
        _, _ = app.Kafka.Add(&dynamic.Config{
          Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")},
          Data: asyncapi3test.NewConfig(
            asyncapi3test.WithInfo("foo", "bar", "1.0"),
            asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"),
-            asyncapi3test.WithChannel("topic-1",
-              asyncapi3test.WithChannelDescription("foobar"),
-              asyncapi3test.WithMessage("foo"),
+            asyncapi3test.AddChannel("topic-1", ch),
+            asyncapi3test.WithOperation("sendAction",
+              asyncapi3test.WithOperationAction("send"),
+              asyncapi3test.WithOperationChannel(ch),
+              asyncapi3test.UseOperationMessage(msg),
            ),
          ),
        }, enginetest.NewEngine())
        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodPost,
          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0",
          map[string]string{"Content-Type": "application/json"},
          `{
-"records": [{"key": "foo", "value": "bar"}]
+"records": [{"key": "foo", "value": {"foo": "bar"}}]
+}`,
+          h,
+          try.HasStatusCode(http.StatusOK),
+          try.HasBody(`{"offsets":[{"partition":0,"offset":0}]}`),
+        )
+
+        s := app.Kafka.Get("foo").Store
+        b, errCode := s.Topic("topic-1").Partitions[0].Read(0, 100)
+        require.Equal(t, kafka.None, errCode)
+        require.Equal(t, "foo", string(kafka.Read(b.Records[0].Key)))
+        require.Equal(t, `{"foo":"bar"}`, string(kafka.Read(b.Records[0].Value)))
+
+        require.Equal(t, float64(1), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value())
+        p := app.Kafka.Get("foo").Store.Topic("topic-1").Partition(0)
+        require.Equal(t, `{"foo":"bar"}`, string(kafka.Read(p.Segments[p.ActiveSegment].Log[0].Data.Value)))
+      },
+    },
+    {
using XML", + app: func() *runtime.App { + app := runtime.New(&static.Config{}) + + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/xml"), + asyncapi3test.WithPayloadOpenAPI( + openapi.New("object", + openapi.WithProperty("foo", openapi.New("string")), + ), + ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + _, _ = app.Kafka.Add(&dynamic.Config{ + Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")}, + Data: asyncapi3test.NewConfig( + asyncapi3test.WithInfo("foo", "bar", "1.0"), + asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"), + asyncapi3test.AddChannel("topic-1", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ), + }, enginetest.NewEngine()) + return app + }, + test: func(t *testing.T, app *runtime.App, h http.Handler) { + try.Handler(t, + http.MethodPost, + "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0", + map[string]string{"Content-Type": "application/json"}, + `{ +"records": [{"key": "foo", "value": {"foo": "bar"}}] }`, - api, + h, try.HasStatusCode(http.StatusOK), try.HasBody(`{"offsets":[{"partition":0,"offset":0}]}`), ) + s := app.Kafka.Get("foo").Store + b, errCode := s.Topic("topic-1").Partitions[0].Read(0, 100) + require.Equal(t, kafka.None, errCode) + require.Equal(t, "foo", string(kafka.Read(b.Records[0].Key))) + require.Equal(t, `bar`, string(kafka.Read(b.Records[0].Value))) + require.Equal(t, float64(1), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value()) + + try.Handler(t, + http.MethodGet, + "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0/offsets", + map[string]string{"Accept": "application/xml"}, + "", + h, + try.HasStatusCode(http.StatusOK), + try.BodyContains(`[{"offset":0,"key":"foo","value":"bar","partition":0}]`), + ) + }, + }, + { + name: "produce kafka message into specific partition using plain XML string", + app: func() *runtime.App { + app := runtime.New(&static.Config{}) + + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/xml"), + asyncapi3test.WithPayloadOpenAPI( + openapi.New("object", + openapi.WithProperty("foo", openapi.New("string")), + ), + ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + _, _ = app.Kafka.Add(&dynamic.Config{ + Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")}, + Data: asyncapi3test.NewConfig( + asyncapi3test.WithInfo("foo", "bar", "1.0"), + asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"), + asyncapi3test.AddChannel("topic-1", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ), + }, enginetest.NewEngine()) + return app + }, + test: func(t *testing.T, app *runtime.App, h http.Handler) { + try.Handler(t, + http.MethodPost, + "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0", + map[string]string{"Content-Type": "application/vnd.mokapi.kafka.xml+json"}, + `{ +"records": [{"key": "foo", "value": "bar"}] +}`, + h, + try.HasStatusCode(http.StatusOK), + try.HasBody(`{"offsets":[{"partition":0,"offset":0}]}`), + ) + + s := app.Kafka.Get("foo").Store + b, errCode := s.Topic("topic-1").Partitions[0].Read(0, 100) + require.Equal(t, kafka.None, errCode) + require.Equal(t, "foo", 
+        require.Equal(t, "foo", string(kafka.Read(b.Records[0].Key)))
+        require.Equal(t, `bar`, string(kafka.Read(b.Records[0].Value)))
+
+        require.Equal(t, float64(1), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value())
+      },
+    },
+    {
+      name: "produce invalid kafka message into specific partition using plain XML string",
+      app: func() *runtime.App {
+        app := runtime.New(&static.Config{})
+
+        msg := asyncapi3test.NewMessage(
+          asyncapi3test.WithContentType("application/xml"),
+          asyncapi3test.WithPayloadOpenAPI(
+            openapi.New("object",
+              openapi.WithProperty("foo", openapi.New("string")),
+              openapi.WithFreeForm(false),
+            ),
+          ),
+        )
+        ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
+        _, _ = app.Kafka.Add(&dynamic.Config{
+          Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")},
+          Data: asyncapi3test.NewConfig(
+            asyncapi3test.WithInfo("foo", "bar", "1.0"),
+            asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"),
+            asyncapi3test.AddChannel("topic-1", ch),
+            asyncapi3test.WithOperation("sendAction",
+              asyncapi3test.WithOperationAction("send"),
+              asyncapi3test.WithOperationChannel(ch),
+              asyncapi3test.UseOperationMessage(msg),
+            ),
+          ),
+        }, enginetest.NewEngine())
+        return app
+      },
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
+        try.Handler(t,
+          http.MethodPost,
+          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0",
+          map[string]string{"Content-Type": "application/vnd.mokapi.kafka.xml+json"},
+          `{
+"records": [{"key": "foo", "value": "bar"}]
+}`,
+          h,
+          try.HasStatusCode(http.StatusOK),
+          try.AssertBody(func(t *testing.T, body string) {
+            var data api.ProduceResponse
+            _ = json.Unmarshal([]byte(body), &data)
+            require.Equal(t, api.ProduceResponse{
+              Offsets: []api.RecordResult{
+                {
+                  Partition: -1,
+                  Offset:    -1,
+                  Error:     "invalid message: error count 1:\n\t- #/additionalProperties: property 'yuh' not defined and the schema does not allow additional properties",
+                },
+              },
+            }, data)
+          }),
+        )
+
+        require.Equal(t, float64(0), app.Monitor.Kafka.Messages.WithLabel("foo", "topic-1").Value())
+      },
+    },
+    {
      name: "get records",
      app: func() *runtime.App {
        app := runtime.New(&static.Config{})
+
+        msg := asyncapi3test.NewMessage(
+          asyncapi3test.WithContentType("application/json"),
+          asyncapi3test.WithPayload(
+            schematest.New("object",
+              schematest.WithProperty("foo", schematest.New("string")),
+            ),
+          ),
+        )
+        ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
        _, _ = app.Kafka.Add(&dynamic.Config{
          Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")},
          Data: asyncapi3test.NewConfig(
            asyncapi3test.WithInfo("foo", "bar", "1.0"),
            asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"),
-            asyncapi3test.WithChannel("topic-1",
-              asyncapi3test.WithChannelDescription("foobar"),
-              asyncapi3test.WithMessage("foo"),
+            asyncapi3test.AddChannel("topic-1", ch),
+            asyncapi3test.WithOperation("sendAction",
+              asyncapi3test.WithOperationAction("send"),
+              asyncapi3test.WithOperationChannel(ch),
+              asyncapi3test.UseOperationMessage(msg),
            ),
          ),
        }, enginetest.NewEngine())

        c := store.NewClient(app.Kafka.Get("foo").Store, app.Monitor.Kafka)
        ct := media.ParseContentType("application/json")
-        _, _ = c.Write("topic-1", []store.Record{
+        res, err := c.Write("topic-1", []store.Record{
          {
            Key:   "foo",
            Value: map[string]interface{}{"value": "bar"},
          },
        }, ct)
+        if err != nil {
+          panic(err)
+        }
+        if len(res) > 0 && res[0].Error != "" {
+          panic(res[0].Error)
+        }

        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0/offsets",
          nil,
          "",
-          api,
+          h,
          try.HasStatusCode(http.StatusOK),
-          try.HasBody(`[{"offset":0,"key":"foo","value":{"value":"bar"},"partition":0}]`),
+          try.HasBody(`[{"offset":0,"key":"foo","value":"{\"value\":\"bar\"}","partition":0}]`),
        )
      },
    },
@@ -681,14 +933,27 @@ func TestHandler_KafkaAPI(t *testing.T) {
      name: "get specific record",
      app: func() *runtime.App {
        app := runtime.New(&static.Config{})
+
+        msg := asyncapi3test.NewMessage(
+          asyncapi3test.WithContentType("application/json"),
+          asyncapi3test.WithPayload(
+            schematest.New("object",
+              schematest.WithProperty("foo", schematest.New("string")),
+            ),
+          ),
+        )
+        ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
        _, _ = app.Kafka.Add(&dynamic.Config{
          Info: dynamic.ConfigInfo{Url: try.MustUrl("kafka.yaml")},
          Data: asyncapi3test.NewConfig(
            asyncapi3test.WithInfo("foo", "bar", "1.0"),
            asyncapi3test.WithServer("broker-1", "kafka", "localhost:9092"),
-            asyncapi3test.WithChannel("topic-1",
-              asyncapi3test.WithChannelDescription("foobar"),
-              asyncapi3test.WithMessage("foo"),
+            asyncapi3test.AddChannel("topic-1", ch),
+            asyncapi3test.WithOperation("sendAction",
+              asyncapi3test.WithOperationAction("send"),
+              asyncapi3test.WithOperationChannel(ch),
+              asyncapi3test.UseOperationMessage(msg),
            ),
          ),
        }, enginetest.NewEngine())
@@ -704,13 +969,13 @@ func TestHandler_KafkaAPI(t *testing.T) {

        return app
      },
-      test: func(t *testing.T, app *runtime.App, api http.Handler) {
+      test: func(t *testing.T, app *runtime.App, h http.Handler) {
        try.Handler(t,
          http.MethodGet,
          "http://foo.api/api/services/kafka/foo/topics/topic-1/partitions/0/offsets/0",
-          nil,
+          map[string]string{"Accept": "application/json"},
          "",
-          api,
+          h,
          try.HasStatusCode(http.StatusOK),
          try.HasBody(`{"offset":0,"key":"foo","value":{"value":"bar"},"partition":0}`),
        )
@@ -725,14 +990,14 @@ func TestHandler_KafkaAPI(t *testing.T) {
      t.Parallel()

      app := tc.app()
-      h := New(app, static.Api{})
+      h := api.New(app, static.Api{})
      tc.test(t, app, h)
    })
  }
}

func TestHandler_Kafka_NotFound(t *testing.T) {
-  h := New(runtime.New(&static.Config{}), static.Api{})
+  h := api.New(runtime.New(&static.Config{}), static.Api{})

  try.Handler(t,
    http.MethodGet,
@@ -777,7 +1042,7 @@ func TestHandler_Kafka_Metrics(t *testing.T) {
    t.Run(tc.name, func(t *testing.T) {
      t.Parallel()

-      h := New(tc.app, static.Api{})
+      h := api.New(tc.app, static.Api{})
      tc.addMetrics(tc.app.Monitor)

      try.Handler(t,
diff --git a/api/handler_search_test.go b/api/handler_search_test.go
index 92e3f2bb7..0089cd404 100644
--- a/api/handler_search_test.go
+++ b/api/handler_search_test.go
@@ -2,7 +2,6 @@ package api

 import (
   "encoding/json"
-  "github.com/stretchr/testify/require"
   "mokapi/config/dynamic"
   "mokapi/config/dynamic/asyncApi/asyncapitest"
   "mokapi/config/dynamic/dynamictest"
@@ -14,6 +13,8 @@ import (
   "mokapi/try"
   "net/http"
   "testing"
+
+  "github.com/stretchr/testify/require"
 )

 func TestHandler_SearchQuery(t *testing.T) {
@@ -57,7 +58,7 @@ func TestHandler_SearchQuery(t *testing.T) {
      response: []try.ResponseCondition{
        try.HasStatusCode(200),
        try.HasHeader("Content-Type", "application/json"),
-        try.HasBody(`{"results":[{"type":"HTTP","title":"foo","fragments":["\u003cmark\u003efoo\u003c/mark\u003e"],"params":{"service":"foo","type":"http"}}],"facets":{"type":[{"value":"HTTP","count":1}]},"total":1}`),
try.HasBody(`{"results":[{"type":"HTTP","title":"foo","fragments":["foo"],"params":{"service":"foo","type":"http"}}],"facets":{"type":[{"value":"HTTP","count":1}]},"total":1}`), }, app: func() *runtime.App { app := runtime.New(&static.Config{Api: static.Api{Search: static.Search{ @@ -76,7 +77,7 @@ func TestHandler_SearchQuery(t *testing.T) { response: []try.ResponseCondition{ try.HasStatusCode(200), try.HasHeader("Content-Type", "application/json"), - try.HasBody(`{"results":[{"type":"HTTP","title":"foo","fragments":["\u003cmark\u003efoo\u003c/mark\u003e"],"params":{"service":"foo","type":"http"}}],"facets":{"type":[{"value":"HTTP","count":1}]},"total":1}`), + try.HasBody(`{"results":[{"type":"HTTP","title":"foo","fragments":["foo"],"params":{"service":"foo","type":"http"}}],"facets":{"type":[{"value":"HTTP","count":1}]},"total":1}`), }, app: func() *runtime.App { app := runtime.New(&static.Config{Api: static.Api{Search: static.Search{ diff --git a/config/dynamic/asyncApi/convert.go b/config/dynamic/asyncApi/convert.go index af7058ff9..b9c9b27ac 100644 --- a/config/dynamic/asyncApi/convert.go +++ b/config/dynamic/asyncApi/convert.go @@ -73,6 +73,7 @@ func convertChannels(cfg *asyncapi3.Config, channels map[string]*ChannelRef) err if err != nil { return err } + ch.Value.Config = cfg cfg.Channels[name] = ch } } diff --git a/engine/engine.go b/engine/engine.go index 0515f003f..bd97f2e27 100644 --- a/engine/engine.go +++ b/engine/engine.go @@ -83,9 +83,10 @@ func (e *Engine) AddScript(evt dynamic.ConfigEvent) error { err := e.run(host) if err != nil { if errors.Is(err, UnsupportedError) { - return + log.Debugf("script not supported: %v", evt.Config.Info.Url.Path) + } else { + log.Errorf("error executing script %v: %v", evt.Config.Info.Url, err) } - log.Errorf("error executing script %v: %v", evt.Config.Info.Url, err) } }() } else { @@ -137,9 +138,6 @@ func (e *Engine) addOrUpdate(host *scriptHost) { func (e *Engine) run(host *scriptHost) error { err := host.Run() - if err != nil { - return err - } if host.CanClose() { if e.parallel { @@ -151,7 +149,7 @@ func (e *Engine) run(host *scriptHost) error { delete(e.scripts, host.name) } - return nil + return err } func (e *Engine) Scripts() int { diff --git a/engine/host.go b/engine/host.go index 787980b17..e14638963 100644 --- a/engine/host.go +++ b/engine/host.go @@ -325,6 +325,9 @@ func (sh *scriptHost) HttpClient(opts common.HttpClientOptions) common.HttpClien } func (sh *scriptHost) CanClose() bool { + if sh.script == nil { + return true + } return len(sh.events) == 0 && len(sh.jobs) == 0 && sh.script.CanClose() && len(sh.cleanupFuncs) == 0 } diff --git a/engine/js_test.go b/engine/js_test.go index 429211714..cd4b2b3b9 100644 --- a/engine/js_test.go +++ b/engine/js_test.go @@ -50,6 +50,16 @@ func TestJsScriptEngine(t *testing.T) { time.Sleep(2 * time.Second) e.Close() }) + t.Run("script from GIT provider", func(t *testing.T) { + t.Parallel() + e := enginetest.NewEngine() + cfgEvent := newScript("https://foo.bar?file=test.js&ref=develop", `import { on } from 'mokapi';export default function() { on('http', function() {})}`) + cfgEvent.Config.Info.Provider = "git" + err := e.AddScript(cfgEvent) + r.NoError(t, err) + r.Equal(t, 1, e.Scripts()) + e.Close() + }) } func TestJsEvery(t *testing.T) { diff --git a/engine/kafka.go b/engine/kafka.go index 6f1fe73f7..90f759076 100644 --- a/engine/kafka.go +++ b/engine/kafka.go @@ -42,7 +42,6 @@ func (c *KafkaClient) Produce(args *common.KafkaProduceArgs) (*common.KafkaProdu var produced 
  var produced []common.KafkaMessageResult
  for _, m := range args.Messages {
    value := m.Data
-    ct := media.ContentType{}
    if m.Value != nil {
      value = m.Value
    }
@@ -52,40 +51,35 @@ func (c *KafkaClient) Produce(args *common.KafkaProduceArgs) (*common.KafkaProdu
        Schema: &schema.Schema{Type: schema.Types{"string"}, Pattern: "[a-z]{9}"},
      },
    }
-    var payload *asyncapi3.SchemaRef
-    var msg *asyncapi3.Message
-    msg, err = selectMessage(value, t.Config, k.Config)
-    if err != nil {
-      return nil, fmt.Errorf("failed to produce message to Kafka topic '%v': %w", t.Name, err)
-    }
-    if msg != nil {
-      if msg.Bindings.Kafka.Key != nil {
-        keySchema = msg.Bindings.Kafka.Key
-      }
-      payload = msg.Payload
-      ct = media.ParseContentType(msg.ContentType)
-    } else {
-      ct = media.ParseContentType(k.DefaultContentType)
-    }
-    if m.Key == nil {
-      m.Key, err = createValue(keySchema)
+    // if m.Value is not used then select Kafka message config by the data which must be valid
+    if m.Value == nil && (value == nil || m.Key == nil) {
+      var payload *asyncapi3.SchemaRef
+      var msg *asyncapi3.Message
+
+      msg, err = selectMessage(value, t.Config, k.Config)
      if err != nil {
-        return nil, fmt.Errorf("unable to generate kafka key: %v", err)
+        return nil, fmt.Errorf("failed to produce message to Kafka topic '%v': %w", t.Name, err)
+      }
+      if msg != nil {
+        if msg.Bindings.Kafka.Key != nil {
+          keySchema = msg.Bindings.Kafka.Key
+        }
+        payload = msg.Payload
      }
-    }
-    if value == nil {
-      value, err = createValue(payload)
-      if err != nil {
-        return nil, fmt.Errorf("unable to generate kafka value: %v", err)
+      if value == nil {
+        value, err = createValue(payload)
+        if err != nil {
+          return nil, fmt.Errorf("unable to generate kafka value: %v", err)
+        }
      }
    }
-    if m.Value == nil && payload != nil {
-      value, err = payload.Value.Marshal(value, ct)
+    if m.Key == nil {
+      m.Key, err = createValue(keySchema)
      if err != nil {
-        return nil, fmt.Errorf("failed to marshal kafka message: %w", err)
+        return nil, fmt.Errorf("unable to generate kafka key: %v", err)
      }
    }
@@ -100,7 +94,7 @@ func (c *KafkaClient) Produce(args *common.KafkaProduceArgs) (*common.KafkaProdu
      Headers:        headers,
      Partition:      m.Partition,
      SkipValidation: m.Value != nil,
-    }}, ct)
+    }}, media.ContentType{})
    if err != nil {
      return nil, fmt.Errorf("failed to produce message to Kafka topic '%v': %w", t.Name, err)
    }
diff --git a/engine/kafka_test.go b/engine/kafka_test.go
index af01200f6..1d82f1e22 100644
--- a/engine/kafka_test.go
+++ b/engine/kafka_test.go
@@ -2,6 +2,7 @@ package engine_test

 import (
   "bytes"
+  "encoding/binary"
   "fmt"
   "io"
   "mokapi/config/dynamic"
@@ -56,7 +57,7 @@ func TestKafkaClient(t *testing.T) {
      cfg: func() *asyncapi3.Config {
        msg := asyncapi3test.NewMessage(
          asyncapi3test.WithPayload(schematest.New("string")),
-          asyncapi3test.WithKey(schematest.New("string")),
+          asyncapi3test.WithKey(schematest.New("string", schematest.WithMinLength(3))),
        )
        return createCfg("foo", msg)
      },
@@ -72,8 +73,8 @@ func TestKafkaClient(t *testing.T) {
        b, errCode := app.Kafka.Get("foo").Store.Topic("foo").Partition(0).Read(0, 1000)
        require.Equal(t, kafka.None, errCode)
        require.NotNil(t, b)
-        require.Equal(t, "XidZuoWq ", kafka.BytesToString(b.Records[0].Key))
-        require.Equal(t, "\"\"", kafka.BytesToString(b.Records[0].Value))
+        require.Equal(t, "EZyvmtlRf", kafka.BytesToString(b.Records[0].Key))
+        require.Equal(t, `"XidZuoWq "`, kafka.BytesToString(b.Records[0].Value))
      },
    },
    {
@@ -81,7 +82,7 @@ func TestKafkaClient(t *testing.T) {
      cfg: func() *asyncapi3.Config {
        msg := asyncapi3test.NewMessage(
asyncapi3test.WithPayload(schematest.New("string")), - asyncapi3test.WithKey(schematest.New("string")), + asyncapi3test.WithKey(schematest.New("string", schematest.WithMinLength(3))), ) return createCfg("foo", msg) }, @@ -97,8 +98,8 @@ func TestKafkaClient(t *testing.T) { b, errCode := app.Kafka.Get("foo").Store.Topic("foo").Partition(0).Read(0, 1000) require.Equal(t, kafka.None, errCode) require.NotNil(t, b) - require.Equal(t, "XidZuoWq ", kafka.BytesToString(b.Records[0].Key)) - require.Equal(t, "\"\"", kafka.BytesToString(b.Records[0].Value)) + require.Equal(t, "EZyvmtlRf", kafka.BytesToString(b.Records[0].Key)) + require.Equal(t, `"XidZuoWq "`, kafka.BytesToString(b.Records[0].Value)) }, }, { @@ -223,6 +224,7 @@ func TestKafkaClient(t *testing.T) { _, err = b.Records[0].Value.Read(val) require.NoError(t, err) require.Equal(t, []byte{123, 0, 0, 0}, val) + require.Equal(t, 123, int(binary.LittleEndian.Uint32(val))) }, }, { @@ -708,6 +710,42 @@ func TestKafkaClient(t *testing.T) { require.EqualError(t, err, "failed to produce message to Kafka topic 'foo': no matching message configuration found for the given value: {\"foo\":\"bar\"}\nhint:\nencoding data to 'application/xml' failed: error count 1:\n\t- #/required: required properties are missing: id\n at mokapi/js/kafka.(*Module).Produce-fm (native)") }, }, + { + name: "content-type is xml with OpenAPI schema using XML using value", + cfg: func() *asyncapi3.Config { + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/xml"), + + asyncapi3test.WithPayloadOpenAPI(opSchematest.New("object", + opSchematest.WithXml(&opSchema.Xml{Name: "foo"}), + opSchematest.WithProperty( + "id", + opSchematest.New( + "string", + opSchematest.WithXml(&opSchema.Xml{Attribute: true}), + ), + ), + opSchematest.WithRequired("id"), + )), + ) + return createCfg("foo", msg) + }, + test: func(t *testing.T, e *engine.Engine, app *runtime.App) { + err := e.AddScript(newScript("test.js", ` + import { produce } from 'mokapi/kafka' + export default function() { + produce({ messages: [{ value: 'bar' }] }) + } + `)) + + require.NoError(t, err) + b, errCode := app.Kafka.Get("foo").Store.Topic("foo").Partition(0).Read(0, 1000) + require.Equal(t, kafka.None, errCode) + require.NotNil(t, b) + require.Equal(t, "gbrmarxhk", kafka.BytesToString(b.Records[0].Key)) + require.Equal(t, `bar`, kafka.BytesToString(b.Records[0].Value)) + }, + }, } for _, tc := range testcases { diff --git a/engine/scripts.go b/engine/scripts.go index d5c32e990..fecc21681 100644 --- a/engine/scripts.go +++ b/engine/scripts.go @@ -33,7 +33,14 @@ func NewDefaultScriptLoader(config *static.Config) ScriptLoader { func (l *DefaultScriptLoader) Load(file *dynamic.Config, host common.Host) (common.Script, error) { s := file.Data.(string) filename := file.Info.Path() - switch filepath.Ext(filename) { + if file.Info.Provider == "git" { + gitFile := file.Info.Url.Query()["file"] + if len(gitFile) > 0 { + filename = gitFile[0] + } + } + ext := filepath.Ext(filename) + switch ext { case ".js", ".cjs", ".mjs", ".ts": return js.New(file, host) case ".lua": diff --git a/examples/mokapi/common.yml b/examples/mokapi/common.yml index 372d23ed4..51ff1bc03 100644 --- a/examples/mokapi/common.yml +++ b/examples/mokapi/common.yml @@ -96,6 +96,12 @@ components: type: string deleted: type: boolean + producerId: + type: number + producerEpoch: + type: number + sequenceNumber: + type: number KafkaValue: type: object properties: diff --git a/examples/mokapi/kafka.js b/examples/mokapi/kafka.js index 
index d865f4421..257b320d3 100644
--- a/examples/mokapi/kafka.js
+++ b/examples/mokapi/kafka.js
@@ -320,7 +320,10 @@ export let events = [
      },
      partition: 1,
      messageId: 'shopOrder',
-      deleted: true
+      deleted: true,
+      producerId: 3,
+      producerEpoch: 1,
+      sequenceNumber: 1
    }
  },
  {
diff --git a/go.mod b/go.mod
index add2c1a2f..89ec96c95 100644
--- a/go.mod
+++ b/go.mod
@@ -12,7 +12,7 @@ require (
   github.com/evanw/esbuild v0.27.0
   github.com/fsnotify/fsnotify v1.9.0
   github.com/go-co-op/gocron v1.37.0
-  github.com/go-git/go-git/v5 v5.16.3
+  github.com/go-git/go-git/v5 v5.16.4
   github.com/google/uuid v1.6.0
   github.com/jinzhu/inflection v1.0.0
   github.com/pkg/errors v0.9.1
diff --git a/go.sum b/go.sum
index f2eba9719..62f3bc215 100644
--- a/go.sum
+++ b/go.sum
@@ -95,8 +95,8 @@ github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UN
 github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
 github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
 github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
-github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8=
-github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
+github.com/go-git/go-git/v5 v5.16.4 h1:7ajIEZHZJULcyJebDLo99bGgS0jRrOxzZG4uCk2Yb2Y=
+github.com/go-git/go-git/v5 v5.16.4/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
 github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
 github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
 github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
diff --git a/kafka/client_context.go b/kafka/client_context.go
index 9b5cb8800..2bd0f95ab 100644
--- a/kafka/client_context.go
+++ b/kafka/client_context.go
@@ -2,8 +2,9 @@ package kafka

 import (
   "context"
-  "github.com/google/uuid"
   "time"
+
+  "github.com/google/uuid"
 )

 const clientKey = "client"
diff --git a/kafka/errors.go b/kafka/errors.go
index 7521df55a..d8dcc0db7 100644
--- a/kafka/errors.go
+++ b/kafka/errors.go
@@ -11,23 +11,30 @@ type Error struct {
 type ErrorCode int16

 const (
-  UnknownServerError      ErrorCode = -1
-  None                    ErrorCode = 0
-  OffsetOutOfRange        ErrorCode = 1
-  CorruptMessage          ErrorCode = 2
-  UnknownTopicOrPartition ErrorCode = 3
-  CoordinatorNotAvailable ErrorCode = 15
-  NotCoordinator          ErrorCode = 16
-  InvalidTopic            ErrorCode = 17
-  IllegalGeneration       ErrorCode = 22
-  InvalidGroupId          ErrorCode = 24
-  UnknownMemberId         ErrorCode = 25
-  RebalanceInProgress     ErrorCode = 27
-  UnsupportedVersion      ErrorCode = 35
-  TopicAlreadyExists      ErrorCode = 36
-  GroupIdNotFound         ErrorCode = 69
-  MemberIdRequired        ErrorCode = 79
-  InvalidRecord           ErrorCode = 87
+  UnknownServerError          ErrorCode = -1
+  None                        ErrorCode = 0
+  OffsetOutOfRange            ErrorCode = 1
+  CorruptMessage              ErrorCode = 2
+  UnknownTopicOrPartition     ErrorCode = 3
+  CoordinatorNotAvailable     ErrorCode = 15
+  NotCoordinator              ErrorCode = 16
+  InvalidTopic                ErrorCode = 17
+  IllegalGeneration           ErrorCode = 22
+  InvalidGroupId              ErrorCode = 24
+  UnknownMemberId             ErrorCode = 25
+  RebalanceInProgress         ErrorCode = 27
+  UnsupportedVersion          ErrorCode = 35
+  TopicAlreadyExists          ErrorCode = 36
+  UnsupportedForMessageFormat ErrorCode = 43
+  OutOfOrderSequenceNumber    ErrorCode = 45
+  DuplicateSequenceNumber     ErrorCode = 46
+  InvalidProducerEpoch        ErrorCode = 47
+  InvalidProducerIdMapping    ErrorCode = 49
+  UnknownProducerId           ErrorCode = 59
+  GroupIdNotFound             ErrorCode = 69
+  MemberIdRequired            ErrorCode = 79
+  InvalidRecord               ErrorCode = 87
+  ProducerFenced              ErrorCode = 90
 )

 var (
diff --git a/kafka/fetch/fetch_test.go b/kafka/fetch/fetch_test.go
index d0aece293..3d4ce6921 100644
--- a/kafka/fetch/fetch_test.go
+++ b/kafka/fetch/fetch_test.go
@@ -153,11 +153,14 @@ func TestResponse(t *testing.T) {
          PreferredReadReplica: 0,
          RecordSet: kafka.RecordBatch{Records: []*kafka.Record{
            {
-              Offset:  0,
-              Time:    kafka.ToTime(1657010762684),
-              Key:     kafka.NewBytes([]byte("foo")),
-              Value:   kafka.NewBytes([]byte("bar")),
-              Headers: nil,
+              Offset:         0,
+              Time:           kafka.ToTime(1657010762684),
+              Key:            kafka.NewBytes([]byte("foo")),
+              Value:          kafka.NewBytes([]byte("bar")),
+              ProducerId:     -1,
+              ProducerEpoch:  -1,
+              SequenceNumber: -1,
+              Headers:        nil,
            },
          },
        },
@@ -185,11 +188,14 @@ func TestResponse(t *testing.T) {
          PreferredReadReplica: 0,
          RecordSet: kafka.RecordBatch{Records: []*kafka.Record{
            {
-              Offset:  0,
-              Time:    kafka.ToTime(1657010762684),
-              Key:     kafka.NewBytes([]byte("foo")),
-              Value:   kafka.NewBytes([]byte("bar")),
-              Headers: nil,
+              Offset:         0,
+              Time:           kafka.ToTime(1657010762684),
+              Key:            kafka.NewBytes([]byte("foo")),
+              Value:          kafka.NewBytes([]byte("bar")),
+              ProducerId:     -1,
+              ProducerEpoch:  -1,
+              SequenceNumber: -1,
+              Headers:        nil,
            },
          },
        },
@@ -217,11 +223,14 @@ func TestResponse(t *testing.T) {
          PreferredReadReplica: 0,
          RecordSet: kafka.RecordBatch{Records: []*kafka.Record{
            {
-              Offset:  0,
-              Time:    kafka.ToTime(1657010762684),
-              Key:     kafka.NewBytes([]byte("foo")),
-              Value:   kafka.NewBytes([]byte("bar")),
-              Headers: nil,
+              Offset:         0,
+              Time:           kafka.ToTime(1657010762684),
+              Key:            kafka.NewBytes([]byte("foo")),
+              Value:          kafka.NewBytes([]byte("bar")),
+              ProducerId:     -1,
+              ProducerEpoch:  -1,
+              SequenceNumber: -1,
+              Headers:        nil,
            },
          },
        },
diff --git a/kafka/initProducerId/initProducerId.go b/kafka/initProducerId/initProducerId.go
new file mode 100644
index 000000000..bb339e039
--- /dev/null
+++ b/kafka/initProducerId/initProducerId.go
@@ -0,0 +1,37 @@
+package initProducerId
+
+import "mokapi/kafka"
+
+func init() {
+  kafka.Register(
+    kafka.ApiReg{
+      ApiKey:     kafka.InitProducerId,
+      MinVersion: 0,
+      MaxVersion: 6,
+    },
+    &Request{},
+    &Response{},
+    3,
+    3,
+  )
+}
+
+type Request struct {
+  TransactionalId      string `kafka:"nullable,compact=2"`
+  TransactionTimeoutMs int32  `kafka:""`
+  ProducerId           int64  `kafka:"min=3"`
+  ProducerEpoch        int16  `kafka:"min=3"`
+  // Enable2PC true if the client wants to enable two-phase commit (2PC) for transaction
+  Enable2PC bool             `kafka:"min=6"`
+  TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=3"`
+}
+
+type Response struct {
+  ThrottleTimeMs          int32            `kafka:""`
+  ErrorCode               kafka.ErrorCode  `kafka:""`
+  ProducerId              int64            `kafka:""`
+  ProducerEpoch           int16            `kafka:""`
+  OngoingTxnProducerId    int64            `kafka:"min=6"`
+  OngoingTxnProducerEpoch int16            `kafka:"min=6"`
+  TagFields               map[int64]string `kafka:"type=TAG_BUFFER,min=3"`
+}
diff --git a/kafka/initProducerId/initProducerId_test.go b/kafka/initProducerId/initProducerId_test.go
new file mode 100644
index 000000000..0a1cce3d4
--- /dev/null
+++ b/kafka/initProducerId/initProducerId_test.go
@@ -0,0 +1,121 @@
+package initProducerId_test
+
+import (
+  "bytes"
+  "encoding/binary"
+  "mokapi/kafka"
+  "mokapi/kafka/initProducerId"
+  "mokapi/kafka/kafkatest"
+  "testing"
+
+  "github.com/stretchr/testify/require"
+)
+
+func TestInit(t *testing.T) {
+  reg := kafka.ApiTypes[kafka.InitProducerId]
+  require.Equal(t, int16(0), reg.MinVersion)
+  require.Equal(t, int16(6), reg.MaxVersion)
+}
+
+func TestRequest(t *testing.T) {
+  kafkatest.TestRequest(t, 2, &initProducerId.Request{
+    TransactionalId:      "trx",
+    TransactionTimeoutMs: 100,
+  })
+
+  kafkatest.TestRequest(t, 6, &initProducerId.Request{
+    TransactionalId:      "trx",
+    TransactionTimeoutMs: 100,
+    ProducerId:           123,
+    ProducerEpoch:        1,
+    Enable2PC:            false,
+  })
+
+  b := kafkatest.WriteRequest(t, 6, 123, "me", &initProducerId.Request{
+    TransactionalId:      "trx",
+    TransactionTimeoutMs: 100,
+    ProducerId:           123,
+    ProducerEpoch:        1,
+    Enable2PC:            false,
+  })
+  expected := new(bytes.Buffer)
+  // header
+  _ = binary.Write(expected, binary.BigEndian, int32(33))                  // length
+  _ = binary.Write(expected, binary.BigEndian, int16(kafka.InitProducerId)) // ApiKey
+  _ = binary.Write(expected, binary.BigEndian, int16(6))                   // ApiVersion
+  _ = binary.Write(expected, binary.BigEndian, int32(123))                 // correlationId
+  _ = binary.Write(expected, binary.BigEndian, int16(2))                   // ClientId length
+  _ = binary.Write(expected, binary.BigEndian, []byte("me"))               // ClientId
+  _ = binary.Write(expected, binary.BigEndian, int8(0))                    // tag buffer
+  // message
+  _ = binary.Write(expected, binary.BigEndian, int8(4))       // TransactionalId length
+  _ = binary.Write(expected, binary.BigEndian, []byte("trx")) // TransactionalId
+  _ = binary.Write(expected, binary.BigEndian, int32(100))    // TransactionTimeoutMs
+  _ = binary.Write(expected, binary.BigEndian, int64(123))    // ProducerId
+  _ = binary.Write(expected, binary.BigEndian, int16(1))      // ProducerEpoch
+  _ = binary.Write(expected, binary.BigEndian, int8(0))       // Enable2PC
+  _ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+  require.Equal(t, expected.Bytes(), b)
+}
+
+func TestResponse(t *testing.T) {
+  kafkatest.TestResponse(t, 2, &initProducerId.Response{
+    ThrottleTimeMs: 100,
+    ErrorCode:      0,
+    ProducerId:     123,
+    ProducerEpoch:  1,
+  })
+
+  kafkatest.TestResponse(t, 6, &initProducerId.Response{
+    ThrottleTimeMs:          100,
+    ErrorCode:               0,
+    ProducerId:              123,
+    ProducerEpoch:           1,
+    OngoingTxnProducerId:    124,
+    OngoingTxnProducerEpoch: 2,
+  })
+
+  b := kafkatest.WriteResponse(t, 2, 123, &initProducerId.Response{
+    ThrottleTimeMs:          100,
+    ErrorCode:               0,
+    ProducerId:              123,
+    ProducerEpoch:           1,
+    OngoingTxnProducerId:    124,
+    OngoingTxnProducerEpoch: 2,
+  })
+  expected := new(bytes.Buffer)
+  // header
+  _ = binary.Write(expected, binary.BigEndian, int32(20))  // length
+  _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId
+  // message
+  _ = binary.Write(expected, binary.BigEndian, int32(100)) // ThrottleTimeMs
+  _ = binary.Write(expected, binary.BigEndian, int16(0))   // ErrorCode
+  _ = binary.Write(expected, binary.BigEndian, int64(123)) // ProducerId
+  _ = binary.Write(expected, binary.BigEndian, int16(1))   // ProducerEpoch
+
+  require.Equal(t, expected.Bytes(), b)
+
+  b = kafkatest.WriteResponse(t, 6, 123, &initProducerId.Response{
+    ThrottleTimeMs:          100,
+    ErrorCode:               0,
+    ProducerId:              123,
+    ProducerEpoch:           1,
+    OngoingTxnProducerId:    124,
+    OngoingTxnProducerEpoch: 2,
+  })
+  expected = new(bytes.Buffer)
+  // header
+  _ = binary.Write(expected, binary.BigEndian, int32(32))  // length
+  _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId
+  _ = binary.Write(expected, binary.BigEndian, int8(0))    // tag buffer
+  // message
+  _ = binary.Write(expected, binary.BigEndian, int32(100)) // ThrottleTimeMs
+  _ = binary.Write(expected, binary.BigEndian, int16(0))   // ErrorCode
+  _ = binary.Write(expected, binary.BigEndian, int64(123)) // ProducerId
+  _ = binary.Write(expected, binary.BigEndian, int16(1))   // ProducerEpoch
+  _ = binary.Write(expected, binary.BigEndian, int64(124)) // OngoingTxnProducerId
+  _ = binary.Write(expected, binary.BigEndian, int16(2))   // OngoingTxnProducerEpoch
+  _ = binary.Write(expected, binary.BigEndian, int8(0))    // tag buffer
+
+  require.Equal(t, expected.Bytes(), b)
+}
diff --git a/kafka/kafkatest/kafkatest.go b/kafka/kafkatest/kafkatest.go
index b6481e1b1..cd8e4ade1 100644
--- a/kafka/kafkatest/kafkatest.go
+++ b/kafka/kafkatest/kafkatest.go
@@ -10,6 +10,7 @@ import (
   "mokapi/kafka/fetch"
   "mokapi/kafka/findCoordinator"
   "mokapi/kafka/heartbeat"
+  "mokapi/kafka/initProducerId"
   "mokapi/kafka/joinGroup"
   "mokapi/kafka/listgroup"
   "mokapi/kafka/metaData"
@@ -70,6 +71,8 @@ func getApiKey(msg kafka.Message) kafka.ApiKey {
     return kafka.ListGroup
   case *createTopics.Request, *createTopics.Response:
     return kafka.CreateTopics
+  case *initProducerId.Request, *initProducerId.Response:
+    return kafka.InitProducerId
   default:
     panic(fmt.Sprintf("unknown type: %v", t))
   }
diff --git a/kafka/kafkatest/protocol.go b/kafka/kafkatest/protocol.go
index 7002cb473..996739536 100644
--- a/kafka/kafkatest/protocol.go
+++ b/kafka/kafkatest/protocol.go
@@ -2,8 +2,10 @@ package kafkatest

 import (
   "bytes"
+  "encoding/json"
   "mokapi/kafka"
   "reflect"
+  "strings"
   "testing"

   "github.com/stretchr/testify/require"
@@ -28,7 +30,17 @@ func TestRequest(t *testing.T, version int16, msg kafka.Message) {
   err = r2.Read(b)
   require.NoError(t, err)

-  require.True(t, deepEqual(r1.Message, r2.Message))
+  if deepEqual(r1.Message, r2.Message) {
+    return
+  }
+
+  expected, _ := json.MarshalIndent(r1.Message, "", "")
+  actual, _ := json.MarshalIndent(r2.Message, "", "")
+
+  t.Errorf("Not equal:\n"+
+    "expected: %s\n"+
+    "actual  : %s", strings.ReplaceAll(string(expected), "\n", ""), strings.ReplaceAll(string(actual), "\n", ""))
+  t.FailNow()
 }

 func WriteRequest(t *testing.T, version int16, correlationId int32, clientId string, msg kafka.Message) []byte {
@@ -73,7 +85,17 @@ func TestResponse(t *testing.T, version int16, msg kafka.Message) {
   err = r2.Read(b)
   require.NoError(t, err)

-  require.True(t, deepEqual(r1.Message, r2.Message))
+  if deepEqual(r1.Message, r2.Message) {
+    return
+  }
+
+  expected, _ := json.MarshalIndent(r1.Message, "", "")
+  actual, _ := json.MarshalIndent(r2.Message, "", "")
+
+  t.Errorf("Not equal:\n"+
+    "expected: %s\n"+
+    "actual  : %s", strings.ReplaceAll(string(expected), "\n", ""), strings.ReplaceAll(string(actual), "\n", ""))
+  t.FailNow()
 }

 func WriteResponse(t *testing.T, version int16, correlationId int32, msg kafka.Message) []byte {
@@ -148,6 +170,9 @@ func deepEqual(i1, i2 any) bool {
     }
     return true
   default:
-    return i1 == i2
+    if i1 == i2 {
+      return true
+    }
+    return false
   }
 }
diff --git a/kafka/produce/produce_test.go b/kafka/produce/produce_test.go
index e9378ea04..21fa211a4 100644
--- a/kafka/produce/produce_test.go
+++ b/kafka/produce/produce_test.go
@@ -31,11 +31,14 @@ func TestRequest(t *testing.T) {
          Record: kafka.RecordBatch{
            Records: []*kafka.Record{
              {
-                Offset:  0,
-                Time:    kafka.ToTime(1657010762684),
-                Key:     kafka.NewBytes([]byte("foo")),
-                Value:   kafka.NewBytes([]byte("bar")),
-                Headers: nil,
+                Offset:         0,
+                Time:           kafka.ToTime(1657010762684),
+                Key:            kafka.NewBytes([]byte("foo")),
+                Value:          kafka.NewBytes([]byte("bar")),
+                ProducerId:     -1,
+                ProducerEpoch:  -1,
+                SequenceNumber: -1,
+                Headers:        nil,
              },
            },
          },
@@ -58,11 +61,14 @@ func TestRequest(t *testing.T) {
          Record: kafka.RecordBatch{
            Records: []*kafka.Record{
              {
-                Offset:  0,
-                Time:    kafka.ToTime(1657010762684),
-                Key:     kafka.NewBytes([]byte("foo")),
-                Value:   kafka.NewBytes([]byte("bar")),
-                Headers: nil,
+                Offset:         0,
+                Time:           kafka.ToTime(1657010762684),
+                Key:            kafka.NewBytes([]byte("foo")),
+                Value:          kafka.NewBytes([]byte("bar")),
+                ProducerId:     -1,
+                ProducerEpoch:  -1,
+                SequenceNumber: -1,
+                Headers:        nil,
              },
            },
          },
@@ -85,11 +91,14 @@ func TestRequest(t *testing.T) {
          Record: kafka.RecordBatch{
            Records: []*kafka.Record{
              {
-                Offset:  0,
-                Time:    kafka.ToTime(1657010762684),
-                Key:     kafka.NewBytes([]byte("foo")),
-                Value:   kafka.NewBytes([]byte("bar")),
-                Headers: nil,
+                Offset:         0,
+                Time:           kafka.ToTime(1657010762684),
+                Key:            kafka.NewBytes([]byte("foo")),
+                Value:          kafka.NewBytes([]byte("bar")),
+                ProducerId:     -1,
+                ProducerEpoch:  -1,
+                SequenceNumber: -1,
+                Headers:        nil,
              },
            },
          },
diff --git a/kafka/protocol.go b/kafka/protocol.go
index 525b319f4..a52cad996 100644
--- a/kafka/protocol.go
+++ b/kafka/protocol.go
@@ -24,6 +24,7 @@ const (
   ListGroup      ApiKey = 16
   ApiVersions    ApiKey = 18
   CreateTopics   ApiKey = 19
+  InitProducerId ApiKey = 22
 )

 var apitext = map[ApiKey]string{
@@ -39,6 +40,7 @@ var apitext = map[ApiKey]string{
   SyncGroup:      "SyncGroup",
   ApiVersions:    "ApiVersions",
   CreateTopics:   "CreateTopics",
+  InitProducerId: "InitProducerId",
 }

 var ApiTypes = map[ApiKey]ApiType{}
diff --git a/kafka/record.go b/kafka/record.go
index 9682e518a..5f891cccf 100644
--- a/kafka/record.go
+++ b/kafka/record.go
@@ -54,11 +54,14 @@ func (rb *RecordBatch) ReadFrom(d *Decoder, version int16, tag kafkaTag) error {
 }

 type Record struct {
-  Offset  int64          `json:"offset"`
-  Time    time.Time      `json:"time"`
-  Key     Bytes          `json:"key"`
-  Value   Bytes          `json:"value"`
-  Headers []RecordHeader `json:"headers"`
+  Offset         int64          `json:"offset"`
+  Time           time.Time      `json:"time"`
+  Key            Bytes          `json:"key"`
+  Value          Bytes          `json:"value"`
+  Headers        []RecordHeader `json:"headers"`
+  ProducerId     int64          `json:"producerId"`
+  ProducerEpoch  int16          `json:"producerEpoch"`
+  SequenceNumber int32          `json:"sequenceNumber"`
 }

 type RecordHeader struct {
@@ -230,3 +233,7 @@ func sizeVarInt(x int64) int {
   i := uint64((x << 1) ^ (x >> 63))
   return (bits.Len64(i|1) + 6) / 7
 }
+
+func (r *Record) String() string {
+  return ""
+}
diff --git a/kafka/record_v2.go b/kafka/record_v2.go
index 4b7aba6aa..53909a351 100644
--- a/kafka/record_v2.go
+++ b/kafka/record_v2.go
@@ -15,10 +15,10 @@ func (rb *RecordBatch) readFromV2(d *Decoder) error {
   attributes := Attributes(d.ReadInt16())
   d.ReadInt32() // lastOffsetDelta
   firstTimestamp := d.ReadInt64()
-  d.ReadInt64() // maxTimestamp
-  d.ReadInt64() // producer ID
-  d.ReadInt16() // producer epoch
-  d.ReadInt32() // baseSequence
+  d.ReadInt64()                  // maxTimestamp
+  producerId := d.ReadInt64()    // producer ID
+  producerEpoch := d.ReadInt16() // producer epoch
+  sequence := d.ReadInt32()      // baseSequence
   numRecords := d.ReadInt32()

   if attributes.Compression() != 0 {
@@ -29,7 +29,12 @@ func (rb *RecordBatch) readFromV2(d *Decoder) error {
   defer pb.Unref()
   rb.Records = make([]*Record, numRecords)
   for i := range rb.Records {
-    r := &Record{}
+    r := &Record{
+      ProducerId:     producerId,
+      ProducerEpoch:  producerEpoch,
+      SequenceNumber: sequence,
+    }
+    sequence++
     rb.Records[i] = r
     d.ReadVarInt() // record size
     d.ReadInt8()   // attributes
diff --git a/providers/asyncapi3/asyncapi3test/config.go b/providers/asyncapi3/asyncapi3test/config.go
index 3839428f3..81e47f9b1 100644
--- a/providers/asyncapi3/asyncapi3test/config.go
+++ b/providers/asyncapi3/asyncapi3test/config.go
string, opts ...ChannelOptions) ConfigOptions { ch := NewChannel(opts...) ch.Name = name c.Channels[name] = &asyncapi3.ChannelRef{Value: ch} + ch.Config = c } } @@ -90,6 +91,7 @@ func AddChannel(name string, ch *asyncapi3.Channel) ConfigOptions { } ch.Name = name c.Channels[name] = &asyncapi3.ChannelRef{Value: ch} + ch.Config = c } } diff --git a/providers/asyncapi3/channel.go b/providers/asyncapi3/channel.go index 05e743e9d..2cdb466e3 100644 --- a/providers/asyncapi3/channel.go +++ b/providers/asyncapi3/channel.go @@ -1,8 +1,9 @@ package asyncapi3 import ( - "gopkg.in/yaml.v3" "mokapi/config/dynamic" + + "gopkg.in/yaml.v3" ) type ChannelRef struct { @@ -22,6 +23,7 @@ type Channel struct { Bindings ChannelBindings `yaml:"bindings" json:"bindings"` ExternalDocs []ExternalDocRef `yaml:"externalDocs" json:"externalDocs"` + Config *Config } type ChannelTrait struct { diff --git a/providers/asyncapi3/config.go b/providers/asyncapi3/config.go index d54ef3551..685138d7b 100644 --- a/providers/asyncapi3/config.go +++ b/providers/asyncapi3/config.go @@ -65,6 +65,7 @@ func (c *Config) Parse(config *dynamic.Config, reader dynamic.Reader) error { } if ch.Value != nil { ch.Value.Name = name + ch.Value.Config = c } } diff --git a/providers/asyncapi3/kafka/store/apiversion_test.go b/providers/asyncapi3/kafka/store/apiversion_test.go index e625c63e7..6a783d147 100644 --- a/providers/asyncapi3/kafka/store/apiversion_test.go +++ b/providers/asyncapi3/kafka/store/apiversion_test.go @@ -73,10 +73,10 @@ func TestApiVersion_Raw(t *testing.T) { // compare the first few bytes expect := []byte{ - 0, 0, 0, 88, // length + 0, 0, 0, 0x5e, // length 0, 0, 0, 0, // Correlation 0, 0, // Error Code - 0, 0, 0, 13, // length of array + 0, 0, 0, 14, // length of array 0, 0, // Produce 0, 0, // min diff --git a/providers/asyncapi3/kafka/store/client.go b/providers/asyncapi3/kafka/store/client.go index ec09ad518..cefc2f44b 100644 --- a/providers/asyncapi3/kafka/store/client.go +++ b/providers/asyncapi3/kafka/store/client.go @@ -4,12 +4,18 @@ import ( "encoding/base64" "encoding/json" "fmt" + "maps" "math/rand" "mokapi/config/dynamic" "mokapi/kafka" - "mokapi/kafka/produce" "mokapi/media" + "mokapi/providers/asyncapi3" + openapi "mokapi/providers/openapi/schema" "mokapi/runtime/monitor" + avro "mokapi/schema/avro/schema" + "mokapi/schema/encoding" + "mokapi/schema/json/schema" + "slices" "time" "github.com/pkg/errors" @@ -73,13 +79,14 @@ func (c *Client) Write(topic string, records []Record, ct media.ContentType) ([] Error: err.Error(), }) } - value, err := c.parse(r.Value, ct) + value, err := c.parse(r.Value, ct, p.Topic.Config) if err != nil { result = append(result, RecordResult{ Partition: -1, Offset: -1, Error: err.Error(), }) + continue } rec := &kafka.Record{ Key: kafka.NewBytes(key), @@ -92,14 +99,14 @@ func (c *Client) Write(topic string, records []Record, ct media.ContentType) ([] }) } b := kafka.RecordBatch{Records: []*kafka.Record{rec}} - var write func(batch kafka.RecordBatch) (baseOffset int64, records []produce.RecordError, err error) + var write func(batch kafka.RecordBatch) (WriteResult, error) if r.SkipValidation { write = p.WriteSkipValidation } else { write = p.Write } - offset, res, err := write(b) + wr, err := write(b) if err != nil { result = append(result, RecordResult{ Partition: -1, @@ -107,15 +114,15 @@ func (c *Client) Write(topic string, records []Record, ct media.ContentType) ([] Error: err.Error(), }) } else { - if len(res) > 0 { + if len(wr.Records) > 0 { result = append(result, RecordResult{ 
				Partition: -1,
				Offset:    -1,
-				Error:     res[0].BatchIndexErrorMessage,
+				Error:     wr.Records[0].BatchIndexErrorMessage,
 			})
 		} else {
 			rr := RecordResult{
-				Offset:    offset,
+				Offset:    wr.BaseOffset,
 				Key:       kafka.Read(b.Records[0].Key),
 				Value:     kafka.Read(b.Records[0].Value),
 				Partition: p.Index,
@@ -162,7 +169,7 @@ func (c *Client) Read(topic string, partition int, offset int64, ct *media.Conte
 		getValue = func(value []byte) (any, error) {
 			return base64.StdEncoding.EncodeToString(value), nil
 		}
-	case ct.Key() == "application/json", ct.IsAny():
+	case ct.Key() == "application/json":
 		getValue = func(value []byte) (any, error) {
 			var val any
 			err := json.Unmarshal(value, &val)
@@ -171,9 +178,11 @@ func (c *Client) Read(topic string, partition int, offset int64, ct *media.Conte
 			}
 			return val, nil
 		}
-	default:
-		return nil, fmt.Errorf("unknown content type: %v", ct)
+	default:
+		getValue = func(value []byte) (any, error) {
+			return string(value), nil
+		}
 	}

 	for _, r := range b.Records {
@@ -214,7 +223,11 @@ func (c *Client) getPartition(t *Topic, id int) (*Partition, error) {
 	return t.Partition(id), nil
 }

-func (c *Client) parse(v any, ct media.ContentType) ([]byte, error) {
+func (c *Client) parse(v any, ct media.ContentType, topic *asyncapi3.Channel) ([]byte, error) {
+	if b, ok := v.([]byte); ok {
+		return b, nil
+	}
+
 	switch ct.Key() {
 	case "application/vnd.mokapi.kafka.binary+json":
 		s, ok := v.(string)
@@ -226,19 +239,40 @@ func (c *Client) parse(v any, ct media.ContentType) ([]byte, error) {
 			return nil, fmt.Errorf("decode base64 string failed: %v", v)
 		}
 		return b, err
+	case "application/vnd.mokapi.kafka.xml+json":
+		s, ok := v.(string)
+		if !ok {
+			return nil, fmt.Errorf("expected string: %v", v)
+		}
+		return []byte(s), nil
+	case "application/vnd.mokapi.kafka.json+json":
+		s, ok := v.(string)
+		if !ok {
+			return nil, fmt.Errorf("expected string: %v", v)
+		}
+		return []byte(s), nil
 	case "application/json":
-		b, ok := v.([]byte)
-		if ok {
-			return b, nil
+		msg, err := selectMessage(v, topic)
+		if err != nil {
+			return nil, err
+		}
+		if msg != nil && msg.Payload != nil {
+			return msg.Payload.Value.Marshal(v, media.ParseContentType(msg.ContentType))
 		}
-		b, _ = json.Marshal(v)
+		b, _ := json.Marshal(v)
 		return b, nil
 	default:
+		msg, err := selectMessage(v, topic)
+		if err != nil {
+			return nil, err
+		}
+		if msg != nil && msg.Payload != nil {
+			return msg.Payload.Value.Marshal(v, media.ParseContentType(msg.ContentType))
+		}
+
 		switch vt := v.(type) {
 		case []byte:
 			return vt, nil
-		case string:
-			return []byte(vt), nil
 		default:
 			return json.Marshal(v)
 		}
@@ -272,3 +306,97 @@ func (r *Record) UnmarshalJSON(b []byte) error {
 	*r = Record(a)
 	return nil
}
+
+func selectMessage(value any, topic *asyncapi3.Channel) (*asyncapi3.Message, error) {
+	noOperationDefined := true
+	var validationErr error
+	cfg := topic.Config
+
+	// first, try to find a matching 'send' operation
+	for _, op := range cfg.Operations {
+		if op.Value == nil || op.Value.Channel.Value == nil {
+			continue
+		}
+		if op.Value.Channel.Value == topic && op.Value.Action == "send" {
+			noOperationDefined = false
+			var messages []*asyncapi3.MessageRef
+			if len(op.Value.Messages) == 0 {
+				messages = slices.Collect(maps.Values(op.Value.Channel.Value.Messages))
+			} else {
+				messages = op.Value.Messages
+			}
+			for _, msg := range messages {
+				if msg.Value == nil {
+					continue
+				}
+				if validationErr = valueMatchMessagePayload(value, msg.Value); validationErr == nil {
+					return msg.Value, nil
+				}
+			}
+		}
+	}
+
+	// second, try to find a matching 'receive' operation
+	for _, op := range cfg.Operations {
+		if op.Value == nil || op.Value.Channel.Value == nil {
+			continue
+		}
+		if op.Value.Channel.Value == topic && op.Value.Action == "receive" {
+			noOperationDefined = false
+			var messages []*asyncapi3.MessageRef
+			if len(op.Value.Messages) == 0 {
+				messages = slices.Collect(maps.Values(op.Value.Channel.Value.Messages))
+			} else {
+				messages = op.Value.Messages
+			}
+			for _, msg := range messages {
+				if msg.Value == nil {
+					continue
+				}
+				if validationErr = valueMatchMessagePayload(value, msg.Value); validationErr == nil {
+					return msg.Value, nil
+				}
+			}
+		}
+	}
+
+	if noOperationDefined {
+		return nil, fmt.Errorf("no 'send' or 'receive' operation defined in specification")
+	}
+
+	if value != nil {
+		switch value.(type) {
+		case string, []byte:
+			break
+		default:
+			b, err := json.Marshal(value)
+			if err == nil {
+				value = string(b)
+			}
+		}
+		return nil, fmt.Errorf("no matching message configuration found for the given value: %v\nhint:\n%w", value, validationErr)
+	}
+	return nil, fmt.Errorf("no message configuration found")
+}
+
+func valueMatchMessagePayload(value any, msg *asyncapi3.Message) error {
+	if value == nil || msg.Payload == nil {
+		return nil
+	}
+	ct := media.ParseContentType(msg.ContentType)
+
+	switch v := msg.Payload.Value.Schema.(type) {
+	case *schema.Schema:
+		_, err := encoding.NewEncoder(v).Write(value, ct)
+		return err
+	case *openapi.Schema:
+		_, err := v.Marshal(value, ct)
+		return err
+	case *avro.Schema:
+		jsSchema := avro.ConvertToJsonSchema(v)
+		_, err := encoding.NewEncoder(jsSchema).Write(value, ct)
+		return err
+	default:
+		return nil
+	}
+}
diff --git a/providers/asyncapi3/kafka/store/client_test.go b/providers/asyncapi3/kafka/store/client_test.go
index 4900b42d4..968958f22 100644
--- a/providers/asyncapi3/kafka/store/client_test.go
+++ b/providers/asyncapi3/kafka/store/client_test.go
@@ -58,18 +58,26 @@ func TestClient(t *testing.T) {
 		},
 		{
 			name: "value as json and random partition",
-			cfg: asyncapi3test.NewConfig(
-				asyncapi3test.WithChannel("foo",
-					asyncapi3test.WithMessage("foo",
-						asyncapi3test.WithContentType("application/json"),
-						asyncapi3test.WithPayload(
-							schematest.New("object",
-								schematest.WithProperty("foo", schematest.New("string")),
-							),
+			cfg: func() *asyncapi3.Config {
+				msg := asyncapi3test.NewMessage(
+					asyncapi3test.WithContentType("application/json"),
+					asyncapi3test.WithPayload(
+						schematest.New("object",
+							schematest.WithProperty("foo", schematest.New("string")),
 						),
 					),
-				),
-			),
+				)
+				ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}))
+
+				return asyncapi3test.NewConfig(
+					asyncapi3test.AddChannel("foo", ch),
+					asyncapi3test.WithOperation("sendAction",
+						asyncapi3test.WithOperationAction("send"),
+						asyncapi3test.WithOperationChannel(ch),
+						asyncapi3test.UseOperationMessage(msg),
+					),
+				)
+			}(),
 			test: func(t *testing.T, s *store.Store, monitor *monitor.Kafka) {
 				c := store.NewClient(s, monitor)
 				ct := media.ParseContentType("application/json")
@@ -90,18 +98,26 @@ func TestClient(t *testing.T) {
 		},
 		{
 			name: "value unspecified",
-			cfg: asyncapi3test.NewConfig(
-				asyncapi3test.WithChannel("foo",
-					asyncapi3test.WithMessage("foo",
-						asyncapi3test.WithContentType("application/json"),
-						asyncapi3test.WithPayload(
-							schematest.New("object",
-								schematest.WithProperty("foo", schematest.New("string")),
-							),
+			cfg: func() *asyncapi3.Config {
+				msg := asyncapi3test.NewMessage(
+					asyncapi3test.WithContentType("application/json"),
+					asyncapi3test.WithPayload(
+						schematest.New("object",
+							schematest.WithProperty("foo",
schematest.New("string")), ), ), - ), - ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + return asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ) + }(), test: func(t *testing.T, s *store.Store, monitor *monitor.Kafka) { c := store.NewClient(s, monitor) ct := media.ParseContentType("") @@ -112,11 +128,11 @@ func TestClient(t *testing.T) { }}, ct) require.NoError(t, err) require.Len(t, result, 1) + require.Equal(t, "", result[0].Error) require.Equal(t, 0, result[0].Partition) require.Equal(t, int64(0), result[0].Offset) require.Nil(t, result[0].Key) require.Equal(t, "{\"foo\":\"foo\"}", string(result[0].Value)) - require.Equal(t, "", result[0].Error) }, }, { @@ -178,32 +194,39 @@ func TestClient(t *testing.T) { }, { name: "use string key and value", - cfg: asyncapi3test.NewConfig( - asyncapi3test.WithChannel("foo", - asyncapi3test.WithMessage("foo", - asyncapi3test.WithContentType("application/json"), - asyncapi3test.WithPayload( - schematest.New("object", - schematest.WithProperty("foo", schematest.New("string")), - ), + cfg: func() *asyncapi3.Config { + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload( + schematest.New("object", + schematest.WithProperty("foo", schematest.New("string")), ), ), - ), - ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + return asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ) + }(), test: func(t *testing.T, s *store.Store, monitor *monitor.Kafka) { c := store.NewClient(s, monitor) - ct := media.ParseContentType("") result, err := c.Write("foo", []store.Record{{ Key: "12345", Value: `{"foo":"bar"}`, - }}, ct) + }}, media.ParseContentType("application/vnd.mokapi.kafka.json+json")) require.NoError(t, err) require.Len(t, result, 1) + require.Equal(t, "", result[0].Error) require.Equal(t, 0, result[0].Partition) require.Equal(t, int64(0), result[0].Offset) require.Equal(t, "12345", string(result[0].Key)) require.Equal(t, `{"foo":"bar"}`, string(result[0].Value)) - require.Equal(t, "", result[0].Error) }, }, { @@ -238,66 +261,82 @@ func TestClient(t *testing.T) { }, { name: "key as number", - cfg: asyncapi3test.NewConfig( - asyncapi3test.WithChannel("foo", - asyncapi3test.WithMessage("foo", - asyncapi3test.WithContentType("application/json"), - asyncapi3test.WithPayload( - schematest.New("object", - schematest.WithProperty("foo", schematest.New("string")), - ), + cfg: func() *asyncapi3.Config { + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload( + schematest.New("object", + schematest.WithProperty("foo", schematest.New("string")), ), ), - ), - ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + return asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ) + }(), test: func(t 
*testing.T, s *store.Store, monitor *monitor.Kafka) { c := store.NewClient(s, monitor) - ct := media.ParseContentType("") result, err := c.Write("foo", []store.Record{{ Key: 1234, Value: `{"foo":"bar"}`, - }}, ct) + }}, media.ParseContentType("application/vnd.mokapi.kafka.json+json")) require.NoError(t, err) require.Len(t, result, 1) + require.Equal(t, "", result[0].Error) require.Equal(t, 0, result[0].Partition) require.Equal(t, int64(0), result[0].Offset) require.Equal(t, "1234", string(result[0].Key)) require.Equal(t, `{"foo":"bar"}`, string(result[0].Value)) - require.Equal(t, "", result[0].Error) }, }, { name: "read with unknown offset (-1) using 2 partitions", - cfg: asyncapi3test.NewConfig( - asyncapi3test.WithChannel("foo", - asyncapi3test.WithMessage("foo", - asyncapi3test.WithContentType("application/json"), - asyncapi3test.WithPayload( - schematest.New("object", - schematest.WithProperty("foo", schematest.New("string")), - ), + cfg: func() *asyncapi3.Config { + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload( + schematest.New("object", + schematest.WithProperty("foo", schematest.New("string")), ), ), + ) + ch := asyncapi3test.NewChannel( + asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg}), asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 2}), - ), - ), + ) + + return asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ) + }(), test: func(t *testing.T, s *store.Store, monitor *monitor.Kafka) { c := store.NewClient(s, monitor) - ct := media.ParseContentType("") result, err := c.Write("foo", []store.Record{{ Key: 1234, Value: `{"foo":"bar"}`, Partition: 1, - }}, ct) + }}, media.ParseContentType("application/vnd.mokapi.kafka.json+json")) require.NoError(t, err) require.Len(t, result, 1) + require.Equal(t, "", result[0].Error) require.Equal(t, 1, result[0].Partition) require.Equal(t, int64(0), result[0].Offset) require.Equal(t, "1234", string(result[0].Key)) require.Equal(t, `{"foo":"bar"}`, string(result[0].Value)) - require.Equal(t, "", result[0].Error) - ct = media.ParseContentType("application/json") + ct := media.ParseContentType("application/json") records, err := c.Read("foo", 1, -1, &ct) require.NoError(t, err) require.Equal(t, []store.Record{ @@ -312,46 +351,52 @@ func TestClient(t *testing.T) { }, { name: "using header", - cfg: asyncapi3test.NewConfig( - asyncapi3test.WithChannel("foo", - asyncapi3test.WithMessage("foo", - asyncapi3test.WithContentType("application/json"), - asyncapi3test.WithPayload( - schematest.New("object", - schematest.WithProperty("foo", schematest.New("string")), - ), + cfg: func() *asyncapi3.Config { + msg := asyncapi3test.NewMessage( + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload( + schematest.New("object", + schematest.WithProperty("foo", schematest.New("string")), ), ), - asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 2}), - ), - ), + ) + ch := asyncapi3test.NewChannel(asyncapi3test.UseMessage("foo", &asyncapi3.MessageRef{Value: msg})) + + return asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("sendAction", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + asyncapi3test.UseOperationMessage(msg), + ), + ) + 
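These test cases switch from an empty content type to the explicit application/vnd.mokapi.kafka.json+json vendor type, which bypasses message selection and schema marshalling entirely. A minimal sketch of that dispatch, assuming only the content-type keys visible in the diff (parseValue is a made-up name, not mokapi's API):

    package sketch

    import (
        "encoding/json"
        "fmt"
    )

    // parseValue mirrors the shape of Client.parse: raw bytes and the
    // vendor media types pass through untouched; everything else falls
    // back to JSON marshalling (message selection omitted here).
    func parseValue(v any, contentType string) ([]byte, error) {
        if b, ok := v.([]byte); ok {
            return b, nil // raw bytes always pass through
        }
        switch contentType {
        case "application/vnd.mokapi.kafka.json+json",
            "application/vnd.mokapi.kafka.xml+json":
            s, ok := v.(string)
            if !ok {
                return nil, fmt.Errorf("expected string: %v", v)
            }
            return []byte(s), nil
        default:
            return json.Marshal(v)
        }
    }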
}(), test: func(t *testing.T, s *store.Store, monitor *monitor.Kafka) { c := store.NewClient(s, monitor) - ct := media.ParseContentType("") result, err := c.Write("foo", []store.Record{{ Key: 1234, Value: `{"foo":"bar"}`, Headers: []store.RecordHeader{{Name: "yuh", Value: "bar"}}, - Partition: 1, - }}, ct) + Partition: 0, + }}, media.ParseContentType("application/vnd.mokapi.kafka.json+json")) require.NoError(t, err) + require.Equal(t, "", result[0].Error) require.Len(t, result, 1) - require.Equal(t, 1, result[0].Partition) + require.Equal(t, 0, result[0].Partition) require.Equal(t, int64(0), result[0].Offset) require.Equal(t, "1234", string(result[0].Key)) require.Equal(t, `{"foo":"bar"}`, string(result[0].Value)) require.Equal(t, []store.RecordHeader{{Name: "yuh", Value: "bar"}}, result[0].Headers) - require.Equal(t, "", result[0].Error) - ct = media.ParseContentType("application/json") - records, err := c.Read("foo", 1, -1, &ct) + ct := media.ParseContentType("application/json") + records, err := c.Read("foo", 0, -1, &ct) require.NoError(t, err) require.Equal(t, []store.Record{ { Key: "1234", Value: map[string]interface{}{"foo": "bar"}, Headers: []store.RecordHeader{{Name: "yuh", Value: "bar"}}, - Partition: 1, + Partition: 0, }, }, records) }, diff --git a/providers/asyncapi3/kafka/store/fetch_test.go b/providers/asyncapi3/kafka/store/fetch_test.go index c15b3601f..1c1813f88 100644 --- a/providers/asyncapi3/kafka/store/fetch_test.go +++ b/providers/asyncapi3/kafka/store/fetch_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/fetch" @@ -12,6 +11,8 @@ import ( "mokapi/runtime/events/eventstest" "testing" "time" + + "github.com/stretchr/testify/require" ) func TestFetch(t *testing.T) { @@ -130,14 +131,14 @@ func TestFetch(t *testing.T) { name: "fetch one record", test: func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo"))) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + wr, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), Value: kafka.NewBytes([]byte("bar")), }, }}) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &fetch.Request{ @@ -165,7 +166,7 @@ func TestFetch(t *testing.T) { name: "fetch one record with MaxBytes 4", test: func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo"))) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + wr, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("key-1")), Value: kafka.NewBytes([]byte("value-1")), @@ -176,7 +177,7 @@ func TestFetch(t *testing.T) { }, }}) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &fetch.Request{ @@ -204,14 +205,14 @@ func TestFetch(t *testing.T) { name: "fetch next not available record", test: func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo"))) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + wr, err := 
s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), Value: kafka.NewBytes([]byte("bar")), }, }}) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &fetch.Request{Topics: []fetch.Topic{ @@ -231,7 +232,7 @@ func TestFetch(t *testing.T) { name: "fetch both records", test: func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo"))) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + wr, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("key-1")), Value: kafka.NewBytes([]byte("value-1")), @@ -242,7 +243,7 @@ func TestFetch(t *testing.T) { }, }}) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &fetch.Request{ @@ -291,7 +292,7 @@ func TestFetch(t *testing.T) { ch <- res }() time.Sleep(300 * time.Millisecond) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + wr, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -300,7 +301,7 @@ func TestFetch(t *testing.T) { }, }) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) r := <-ch @@ -336,14 +337,14 @@ func TestFetch(t *testing.T) { name: "fetch offset out of range", test: func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo"))) - _, records, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + wr, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), Value: kafka.NewBytes([]byte("bar")), }, }}) require.NoError(t, err) - require.Len(t, records, 0) + require.Len(t, wr.Records, 0) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &fetch.Request{Topics: []fetch.Topic{ diff --git a/providers/asyncapi3/kafka/store/init_producerid.go b/providers/asyncapi3/kafka/store/init_producerid.go new file mode 100644 index 000000000..7d8178700 --- /dev/null +++ b/providers/asyncapi3/kafka/store/init_producerid.go @@ -0,0 +1,40 @@ +package store + +import ( + "mokapi/kafka" + "mokapi/kafka/initProducerId" + "sync/atomic" + + log "github.com/sirupsen/logrus" +) + +func (s *Store) initProducerID(rw kafka.ResponseWriter, req *kafka.Request) error { + r := req.Message.(*initProducerId.Request) + res := &initProducerId.Response{} + + if r.TransactionalId == "" { + if r.ProducerId > 0 { + ps, ok := s.producers[r.ProducerId] + if !ok { + res.ErrorCode = kafka.UnknownProducerId + } else if r.ProducerEpoch < ps.ProducerEpoch { + res.ErrorCode = kafka.ProducerFenced + } else { + ps.ProducerEpoch++ + res.ProducerId = ps.ProducerId + res.ProducerEpoch = ps.ProducerEpoch + } + return rw.Write(res) + } + + res.ProducerId = atomic.AddInt64(&s.nextPID, 1) + res.ProducerEpoch = 0 + ps := &ProducerState{ProducerId: res.ProducerId, ProducerEpoch: res.ProducerEpoch} + s.producers[res.ProducerId] = ps + } else { + res.ErrorCode = kafka.UnsupportedForMessageFormat + log.Errorf("kafka: mokapi does not support transactional producer: %s", r.TransactionalId) + } + + return rw.Write(res) +} diff --git 
a/providers/asyncapi3/kafka/store/log.go b/providers/asyncapi3/kafka/store/log.go index bc9c49e5d..737f11c08 100644 --- a/providers/asyncapi3/kafka/store/log.go +++ b/providers/asyncapi3/kafka/store/log.go @@ -1,21 +1,25 @@ package store import ( + "mokapi/kafka" "mokapi/runtime/events" ) type LogRecord func(log *KafkaLog, traits events.Traits) type KafkaLog struct { - Offset int64 `json:"offset"` - Key LogValue `json:"key"` - Message LogValue `json:"message"` - SchemaId int `json:"schemaId"` - MessageId string `json:"messageId"` - Partition int `json:"partition"` - Headers map[string]LogValue `json:"headers"` - Deleted bool `json:"deleted"` - Api string `json:"api"` + Offset int64 `json:"offset"` + Key LogValue `json:"key"` + Message LogValue `json:"message"` + SchemaId int `json:"schemaId"` + MessageId string `json:"messageId"` + Partition int `json:"partition"` + Headers map[string]LogValue `json:"headers"` + ProducerId int64 `json:"producerId"` + ProducerEpoch int16 `json:"producerEpoch"` + SequenceNumber int32 `json:"sequenceNumber"` + Deleted bool `json:"deleted"` + Api string `json:"api"` } type LogValue struct { @@ -30,3 +34,14 @@ func (l *KafkaLog) Title() string { return string(l.Key.Binary) } } + +func newKafkaLog(record *kafka.Record) *KafkaLog { + return &KafkaLog{ + Key: LogValue{Binary: kafka.Read(record.Key)}, + Message: LogValue{Binary: kafka.Read(record.Value)}, + Headers: convertHeader(record.Headers), + ProducerId: record.ProducerId, + ProducerEpoch: record.ProducerEpoch, + SequenceNumber: record.SequenceNumber, + } +} diff --git a/providers/asyncapi3/kafka/store/log_cleaner.go b/providers/asyncapi3/kafka/store/log_cleaner.go index 86341eeb7..08f2990b7 100644 --- a/providers/asyncapi3/kafka/store/log_cleaner.go +++ b/providers/asyncapi3/kafka/store/log_cleaner.go @@ -1,9 +1,10 @@ package store import ( - log "github.com/sirupsen/logrus" "sort" "time" + + log "github.com/sirupsen/logrus" ) func (s *Store) cleanLog(b *Broker) { @@ -37,7 +38,7 @@ func (s *Store) cleanLog(b *Broker) { } for _, p := range topic.Partitions { - if p.Leader != b.Id { + if p.Leader.Id != b.Id { continue } diff --git a/providers/asyncapi3/kafka/store/metadata.go b/providers/asyncapi3/kafka/store/metadata.go index 93fc68510..2416ad7c8 100644 --- a/providers/asyncapi3/kafka/store/metadata.go +++ b/providers/asyncapi3/kafka/store/metadata.go @@ -77,9 +77,13 @@ func (s *Store) metadata(rw kafka.ResponseWriter, req *kafka.Request) error { for _, n := range replicas { nodes = append(nodes, int32(n)) } + brokerId := -1 + if p.Leader != nil { + brokerId = p.Leader.Id + } resTopic.Partitions = append(resTopic.Partitions, metaData.ResponsePartition{ PartitionIndex: int32(i), - LeaderId: int32(p.Leader), + LeaderId: int32(brokerId), ReplicaNodes: nodes, IsrNodes: nodes, }) diff --git a/providers/asyncapi3/kafka/store/offset_fetch_test.go b/providers/asyncapi3/kafka/store/offset_fetch_test.go index 9196e7d5b..6f587b9bc 100644 --- a/providers/asyncapi3/kafka/store/offset_fetch_test.go +++ b/providers/asyncapi3/kafka/store/offset_fetch_test.go @@ -201,7 +201,7 @@ func TestOffsetFetch(t *testing.T) { asyncapi3test.WithServer("", "kafka", b.Addr), asyncapi3test.WithChannel("foo"), )) - _, _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -252,7 +252,7 @@ func TestOffsetFetch(t *testing.T) { asyncapi3test.WithServer("", "kafka", b.Addr), asyncapi3test.WithChannel("foo"), )) - 
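The new initProducerID handler added above hands out producer ids for idempotent (non-transactional) producers: a known id gets its epoch bumped, an unknown request gets a fresh id from an atomic counter. Roughly, under the same assumptions (illustrative names, no locking or error codes):

    package sketch

    import "sync/atomic"

    type producerState struct {
        id    int64
        epoch int16
    }

    type registry struct {
        nextPID   int64
        producers map[int64]*producerState
    }

    func newRegistry() *registry {
        return &registry{producers: map[int64]*producerState{}}
    }

    // initProducer bumps the epoch of a known producer, or allocates a
    // fresh id starting at epoch 0 — the two non-error paths of the handler.
    func (r *registry) initProducer(knownID int64) (int64, int16) {
        if ps, ok := r.producers[knownID]; ok {
            ps.epoch++
            return ps.id, ps.epoch
        }
        id := atomic.AddInt64(&r.nextPID, 1)
        r.producers[id] = &producerState{id: id}
        return id, 0
    }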
_, _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), diff --git a/providers/asyncapi3/kafka/store/partition.go b/providers/asyncapi3/kafka/store/partition.go index 2c75ff945..868024e62 100644 --- a/providers/asyncapi3/kafka/store/partition.go +++ b/providers/asyncapi3/kafka/store/partition.go @@ -22,13 +22,15 @@ type Partition struct { Tail int64 Topic *Topic - Leader int + Leader *Broker Replicas []int validator *validator logger LogRecord trigger Trigger + producers map[int64]*PartitionProducerState + m sync.RWMutex } @@ -47,39 +49,54 @@ type record struct { Log *KafkaLog } -type WriteOptions func(args *WriteArgs) - -type WriteArgs struct { +type WriteOptions struct { SkipValidation bool } +type WriteResult struct { + BaseOffset int64 + Records []produce.RecordError + ErrorCode kafka.ErrorCode + ErrorMessage string +} + +type PartitionProducerState struct { + ProducerId int64 + Epoch int16 + LastSequence int32 +} + func newPartition(index int, brokers Brokers, logger LogRecord, trigger Trigger, topic *Topic) *Partition { - brokerList := make([]int, 0, len(brokers)) + brokerIds := make([]int, 0, len(brokers)) + brokerList := make([]*Broker, 0, len(brokers)) for i, b := range brokers { if topic.Config != nil && len(topic.Config.Servers) > 0 { if slices.ContainsFunc(topic.Config.Servers, func(s *asyncapi3.ServerRef) bool { return s.Value == b.config }) { - brokerList = append(brokerList, i) + brokerList = append(brokerList, b) + brokerIds = append(brokerIds, i) } } else { - brokerList = append(brokerList, i) + brokerList = append(brokerList, b) + brokerIds = append(brokerIds, i) } } p := &Partition{ - Index: index, - Head: 0, - Tail: 0, - Segments: make(map[int64]*Segment), - logger: logger, - trigger: trigger, - Topic: topic, + Index: index, + Head: 0, + Tail: 0, + Segments: make(map[int64]*Segment), + logger: logger, + trigger: trigger, + Topic: topic, + producers: make(map[int64]*PartitionProducerState), } if len(brokerList) > 0 { p.Leader = brokerList[0] } if len(brokerList) > 1 { - p.Replicas = brokerList[1:] + p.Replicas = brokerIds[1:] } else { p.Replicas = make([]int, 0) } @@ -107,6 +124,7 @@ func (p *Partition) Read(offset int64, maxBytes int) (kafka.RecordBatch, kafka.E for seg.contains(offset) { r := seg.record(offset) + if baseOffset == 0 { baseOffset = r.Offset baseTime = r.Time @@ -123,43 +141,79 @@ func (p *Partition) Read(offset int64, maxBytes int) (kafka.RecordBatch, kafka.E } } -func (p *Partition) WriteSkipValidation(batch kafka.RecordBatch) (baseOffset int64, records []produce.RecordError, err error) { - return p.write(batch, true) +func (p *Partition) WriteSkipValidation(batch kafka.RecordBatch) (WriteResult, error) { + return p.write(batch, WriteOptions{SkipValidation: true}) } -func (p *Partition) Write(batch kafka.RecordBatch) (baseOffset int64, records []produce.RecordError, err error) { - return p.write(batch, false) +func (p *Partition) Write(batch kafka.RecordBatch) (WriteResult, error) { + return p.write(batch, WriteOptions{SkipValidation: false}) } -func (p *Partition) write(batch kafka.RecordBatch, skipValidation bool) (baseOffset int64, records []produce.RecordError, err error) { +func (p *Partition) write(batch kafka.RecordBatch, opts WriteOptions) (WriteResult, error) { if p == nil { - return 0, nil, fmt.Errorf("partition is nil") + return WriteResult{}, fmt.Errorf("partition is nil") } p.m.Lock() defer p.m.Unlock() + 
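The write path that follows enforces per-partition idempotence: a record is accepted only when its sequence number is exactly lastSequence+1. A sketch of that classification in isolation, with hypothetical names:

    package sketch

    import "fmt"

    // checkSequence classifies an incoming sequence number the way the
    // write path below does: exactly lastSeq+1 is accepted, lastSeq or
    // below is a duplicate, anything further ahead is out of order.
    func checkSequence(lastSeq, incoming int32) error {
        switch {
        case incoming == lastSeq+1:
            return nil // next expected record
        case incoming <= lastSeq:
            return fmt.Errorf("message sequence number already received: %d", incoming)
        default:
            return fmt.Errorf("expected sequence number %d but got %d", lastSeq+1, incoming)
        }
    }

With lastSeq initialized to -1 for a producer that has not written to the partition yet, the first accepted sequence number is 0, which matches the produce tests later in this patch.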
	result := WriteResult{}
+	var writeFuncs []func()
+	var producer *ProducerState
+	sequenceNumber := int32(-1)
 	now := time.Now()
-	baseOffset = p.Tail
+	result.BaseOffset = p.Tail
 	var baseTime time.Time
-	for _, r := range batch.Records {
-		var result *KafkaLog
-		result, err = p.validator.Validate(r)
-		if skipValidation && err != nil {
-			err = nil
+	for i, r := range batch.Records {
+		// validate producer idempotence
+		if r.ProducerId > 0 {
+			if producer == nil {
+				producer = p.Topic.s.producers[r.ProducerId]
+			}
+			state, ok := p.producers[r.ProducerId]
+			if ok {
+				sequenceNumber = state.LastSequence
+			}
+
+			if producer == nil {
+				result.fail(i, kafka.InvalidProducerIdMapping, "unknown producer id")
+				return result, nil
+			} else if producer.ProducerEpoch != r.ProducerEpoch {
+				// Without transactional produce this cannot happen, because a
+				// producer always gets a fresh producer id. Once transactions
+				// are implemented, this should return PRODUCER_FENCED when the
+				// record's epoch is lower than the currently known one.
+				result.fail(i, kafka.InvalidProducerEpoch, "producer epoch does not match")
+				return result, nil
+			} else if r.SequenceNumber != sequenceNumber+1 {
+				var msg string
+				if r.SequenceNumber <= sequenceNumber {
+					msg = fmt.Sprintf("message sequence number already received: %d", r.SequenceNumber)
+					result.fail(i, kafka.DuplicateSequenceNumber, msg)
+				} else {
+					msg = fmt.Sprintf("expected sequence number %d but got %d", sequenceNumber+1, r.SequenceNumber)
+					result.fail(i, kafka.OutOfOrderSequenceNumber, msg)
+				}
+				return result, nil
+			}
+			sequenceNumber++
 		}
-		if err != nil {
-			records = append(records, produce.RecordError{BatchIndex: int32(r.Offset), BatchIndexErrorMessage: err.Error()})
+
+		kLog, err := p.validator.Validate(r)
+		if err != nil && !opts.SkipValidation {
+			result.fail(i, kafka.InvalidRecord, err.Error())
+			return result, nil
 		}
-		if p.trigger(r, result.SchemaId) {
+		if p.trigger(r, kLog.SchemaId) && !opts.SkipValidation {
 			// validate again
-			result, err = p.validator.Validate(r)
-		}
-
-		if len(records) > 0 && p.Topic.Config.Bindings.Kafka.ValueSchemaValidation {
-			return p.Tail, records, fmt.Errorf("validation error: %w", err)
+			kLog, err = p.validator.Validate(r)
+			if err != nil {
+				result.fail(i, kafka.InvalidRecord, err.Error())
+				return result, nil
+			}
 		}

 		if r.Time.IsZero() {
@@ -181,23 +235,37 @@ func (p *Partition) write(batch kafka.RecordBatch, opts WriteOptions) (WriteResu
 			segment = p.addSegment()
 		}

-		segment.Log = append(segment.Log, &record{Data: r, Log: result})
+		segment.Log = append(segment.Log, &record{Data: r, Log: kLog})
 		segment.Tail++
 		segment.LastWritten = now
-		segment.Size += r.Size(baseOffset, baseTime)
+		segment.Size += r.Size(result.BaseOffset, baseTime)
 		p.Tail++

-		result.Partition = p.Index
-		result.Offset = r.Offset
-		p.logger(result, events.NewTraits().With("partition", strconv.Itoa(p.Index)))
+		kLog.Partition = p.Index
+		kLog.Offset = r.Offset
+		p.logger(kLog, events.NewTraits().With("partition", strconv.Itoa(p.Index)))
 		})
 	}

+	if len(result.Records) > 0 && p.Topic.Config.Bindings.Kafka.ValueSchemaValidation {
+		return result, nil
+	}
+
 	for _, writeFunc := range writeFuncs {
 		writeFunc()
 	}

-	return
+	if sequenceNumber >= 0 && producer != nil {
+		state, ok := p.producers[producer.ProducerId]
+		if !ok {
+			state = &PartitionProducerState{LastSequence: sequenceNumber}
+			p.producers[producer.ProducerId] = state
+		} else {
+			state.LastSequence = sequenceNumber
+		}
+	}
+
+	return result, nil
 }

 func (p *Partition) Offset() int64 {
@@ -308,3
+376,12 @@ func (s *Segment) delete() { r.Log.Deleted = true } } + +func (r *WriteResult) fail(index int, code kafka.ErrorCode, msg string) { + r.ErrorCode = code + r.ErrorMessage = msg + r.Records = append(r.Records, produce.RecordError{ + BatchIndex: int32(index), + BatchIndexErrorMessage: msg, + }) +} diff --git a/providers/asyncapi3/kafka/store/partition_test.go b/providers/asyncapi3/kafka/store/partition_test.go index eaf70a90a..f15c955f9 100644 --- a/providers/asyncapi3/kafka/store/partition_test.go +++ b/providers/asyncapi3/kafka/store/partition_test.go @@ -1,7 +1,6 @@ package store import ( - "github.com/stretchr/testify/require" "mokapi/kafka" "mokapi/providers/asyncapi3" "mokapi/runtime/events" @@ -9,6 +8,8 @@ import ( "mokapi/schema/json/schema/schematest" "testing" "time" + + "github.com/stretchr/testify/require" ) func TestPartition(t *testing.T) { @@ -23,7 +24,7 @@ func TestPartition(t *testing.T) { require.Equal(t, 0, p.Index) require.Equal(t, int64(0), p.StartOffset()) require.Equal(t, int64(0), p.Offset()) - require.Equal(t, 1, p.Leader) + require.Equal(t, 1, p.Leader.Id) require.Equal(t, []int{}, p.Replicas) } @@ -39,7 +40,7 @@ func TestPartition_Write(t *testing.T) { &Topic{}, ) - offset, records, err := p.Write(kafka.RecordBatch{ + wr, err := p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -57,8 +58,8 @@ func TestPartition_Write(t *testing.T) { }) require.NoError(t, err) - require.Len(t, records, 0) - require.Equal(t, int64(0), offset) + require.Len(t, wr.Records, 0) + require.Equal(t, int64(0), wr.BaseOffset) require.Equal(t, int64(2), p.Offset()) require.Equal(t, int64(0), p.StartOffset()) @@ -96,7 +97,7 @@ func TestPartition_Read(t *testing.T) { func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) - offset, records, err := p.Write(kafka.RecordBatch{ + wr, err := p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -107,8 +108,8 @@ func TestPartition_Read(t *testing.T) { }, }) require.NoError(t, err) - require.Len(t, records, 0) - require.Equal(t, int64(0), offset) + require.Len(t, wr.Records, 0) + require.Equal(t, int64(0), wr.BaseOffset) b, errCode := p.Read(1, 1) require.Equal(t, kafka.None, errCode) @@ -136,7 +137,7 @@ func TestPartition_Read_OutOfOffset(t *testing.T) { func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) - _, _, _ = p.Write(kafka.RecordBatch{ + _, _ = p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -173,7 +174,7 @@ func TestPartition_Write_Value_Validator(t *testing.T) { }, }} - offset, recordsWithError, err := p.Write(kafka.RecordBatch{ + wr, err := p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -184,15 +185,15 @@ func TestPartition_Write_Value_Validator(t *testing.T) { }, }) - require.EqualError(t, err, "validation error: invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number") - require.Len(t, recordsWithError, 1) - require.Equal(t, int32(0), recordsWithError[0].BatchIndex) - require.Equal(t, "invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number", recordsWithError[0].BatchIndexErrorMessage) - require.Equal(t, int64(0), offset) + require.NoError(t, err) + require.Len(t, wr.Records, 1) + require.Equal(t, int32(0), wr.Records[0].BatchIndex) + require.Equal(t, "invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number", wr.Records[0].BatchIndexErrorMessage) + require.Equal(t, int64(0), 
wr.BaseOffset) require.Equal(t, int64(0), p.Offset()) require.Equal(t, int64(0), p.StartOffset()) - offset, recordsWithError, err = p.Write(kafka.RecordBatch{ + wr, err = p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -207,8 +208,8 @@ func TestPartition_Write_Value_Validator(t *testing.T) { }) require.NoError(t, err) - require.Len(t, recordsWithError, 0) - require.Equal(t, int64(0), offset) + require.Len(t, wr.Records, 0) + require.Equal(t, int64(0), wr.BaseOffset) require.Equal(t, int64(1), p.Offset()) require.Equal(t, int64(0), p.StartOffset()) record := p.Segments[p.ActiveSegment].record(0) @@ -224,7 +225,7 @@ func TestPatition_Retention(t *testing.T) { &Topic{}, ) require.Equal(t, int64(0), p.Head) - offset, records, err := p.Write(kafka.RecordBatch{ + _, _ = p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -234,7 +235,7 @@ func TestPatition_Retention(t *testing.T) { }, }, }) - offset, records, err = p.Write(kafka.RecordBatch{ + wr, err := p.Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Time: time.Now(), @@ -245,8 +246,8 @@ func TestPatition_Retention(t *testing.T) { }, }) require.NoError(t, err) - require.Len(t, records, 0) - require.Equal(t, int64(1), offset) + require.Len(t, wr.Records, 0) + require.Equal(t, int64(1), wr.BaseOffset) require.Equal(t, int64(0), p.Head) require.Equal(t, int64(2), p.Tail) diff --git a/providers/asyncapi3/kafka/store/produce.go b/providers/asyncapi3/kafka/store/produce.go index 9d9b88d40..6ad0af618 100644 --- a/providers/asyncapi3/kafka/store/produce.go +++ b/providers/asyncapi3/kafka/store/produce.go @@ -16,6 +16,7 @@ func (s *Store) produce(rw kafka.ResponseWriter, req *kafka.Request) error { ctx := kafka.ClientFromContext(req) m, withMonitor := monitor.KafkaFromContext(req.Context) + opts := WriteOptions{} for _, rt := range r.Topics { topic := s.Topic(rt.Name) @@ -29,29 +30,33 @@ func (s *Store) produce(rw kafka.ResponseWriter, req *kafka.Request) error { } if topic == nil { - s := fmt.Sprintf("kafka: produce unknown topic %v", rt.Name) + s := fmt.Sprintf("kafka: failed to write: %s", rt.Name) log.Error(s) resPartition.ErrorCode = kafka.UnknownTopicOrPartition resPartition.ErrorMessage = s } else if err := validateProducer(topic, ctx); err != nil { resPartition.ErrorCode = kafka.UnknownServerError resPartition.ErrorMessage = fmt.Sprintf("invalid producer clientId '%v' for topic %v: %v", ctx.ClientId, topic.Name, err) - log.Errorf("kafka Produce: %v", resPartition.ErrorMessage) + log.Errorf("kafka: failed to write to topic '%s': %s", topic.Name, resPartition.ErrorMessage) } else { p := topic.Partition(int(rp.Index)) if p == nil { resPartition.ErrorCode = kafka.UnknownTopicOrPartition resPartition.ErrorMessage = fmt.Sprintf("unknown partition %v", rp.Index) - log.Errorf("kafka Produce: %v", resPartition.ErrorMessage) + log.Errorf("kafka: failed to write to topic '%s': %s", topic.Name, resPartition.ErrorMessage) } else { - baseOffset, records, err := p.Write(rp.Record) + wr, err := p.write(rp.Record, opts) if err != nil { - resPartition.ErrorCode = kafka.InvalidRecord - resPartition.ErrorMessage = fmt.Sprintf("invalid message received for topic %v: %v", rt.Name, err) - resPartition.RecordErrors = records - log.Errorf("kafka Produce: %v", resPartition.ErrorMessage) + resPartition.ErrorCode = kafka.UnknownServerError + resPartition.ErrorMessage = fmt.Sprintf("failed to write to topic '%v': %v", rt.Name, err.Error()) + log.Errorf("kafka: failed to write to topic '%s' partition %d: %s", 
topic.Name, rp.Index, resPartition.ErrorMessage) + } else if wr.ErrorCode != kafka.None { + resPartition.ErrorCode = wr.ErrorCode + resPartition.ErrorMessage = wr.ErrorMessage + resPartition.RecordErrors = wr.Records + log.Errorf("kafka: failed to write to topic '%s' partition %d: %s", topic.Name, rp.Index, resPartition.ErrorMessage) } else { - resPartition.BaseOffset = baseOffset + resPartition.BaseOffset = wr.BaseOffset if withMonitor { go s.UpdateMetrics(m, topic, p, rp.Record) } diff --git a/providers/asyncapi3/kafka/store/produce_test.go b/providers/asyncapi3/kafka/store/produce_test.go index 8bd671de1..71ba202ff 100644 --- a/providers/asyncapi3/kafka/store/produce_test.go +++ b/providers/asyncapi3/kafka/store/produce_test.go @@ -1,13 +1,12 @@ package store_test import ( - "github.com/sirupsen/logrus" - "github.com/sirupsen/logrus/hooks/test" - "github.com/stretchr/testify/require" + "context" "mokapi/engine/common" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/fetch" + "mokapi/kafka/initProducerId" "mokapi/kafka/kafkatest" "mokapi/kafka/offset" "mokapi/kafka/produce" @@ -19,6 +18,10 @@ import ( "mokapi/schema/json/schema/schematest" "testing" "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" ) func TestProduce(t *testing.T) { @@ -139,7 +142,7 @@ func TestProduce(t *testing.T) { func(t *testing.T, s *store.Store, sm *events.StoreManager) { s.Update(asyncapi3test.NewConfig( asyncapi3test.WithChannel("foo"))) - s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ + _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), Value: kafka.NewBytes([]byte("bar")), @@ -264,7 +267,7 @@ func TestProduce(t *testing.T) { require.Equal(t, 1, len(hook.Entries)) require.Equal(t, logrus.ErrorLevel, hook.LastEntry().Level) - require.Equal(t, "kafka Produce: invalid producer clientId 'kafkatest' for topic foo: error count 1:\n\t- #/pattern: string 'kafkatest' does not match regex pattern '^[A-Z]{10}[0-5]$'", hook.LastEntry().Message) + require.Equal(t, "kafka: failed to write to topic 'foo': invalid producer clientId 'kafkatest' for topic foo: error count 1:\n\t- #/pattern: string 'kafkatest' does not match regex pattern '^[A-Z]{10}[0-5]$'", hook.LastEntry().Message) }, }, { @@ -349,6 +352,189 @@ func TestProduce(t *testing.T) { require.Equal(t, kafka.UnknownTopicOrPartition, res.Topics[0].Partitions[0].ErrorCode, "expected no kafka error") }, }, + { + "using producer Id", + func(t *testing.T, s *store.Store, sm *events.StoreManager) { + ch := asyncapi3test.NewChannel( + asyncapi3test.WithMessage("foo", + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload(schematest.New("integer"))), + ) + s.Update(asyncapi3test.NewConfig( + asyncapi3test.WithInfo("test", "", ""), + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("foo", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + ), + )) + hook := test.NewGlobal() + ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424") + sm.SetStore(5, events.NewTraits().WithNamespace("kafka")) + + rr := kafkatest.NewRecorder() + s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &initProducerId.Request{}).WithContext(ctx)) + producerState := rr.Message.(*initProducerId.Response) + rr = kafkatest.NewRecorder() + s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &produce.Request{ + Topics: 
[]produce.RequestTopic{
+					{Name: "foo", Partitions: []produce.RequestPartition{
+						{
+							Index: 0,
+							Record: kafka.RecordBatch{
+								Records: []*kafka.Record{
+									{
+										Offset:         0,
+										Time:           time.Now(),
+										Key:            kafka.NewBytes([]byte(`"foo-1"`)),
+										Value:          kafka.NewBytes([]byte(`4`)),
+										Headers:        nil,
+										ProducerId:     producerState.ProducerId,
+										ProducerEpoch:  producerState.ProducerEpoch,
+										SequenceNumber: 0,
+									},
+								},
+							},
+						},
+					},
+				}},
+			}).WithContext(ctx))
+
+			res, ok := rr.Message.(*produce.Response)
+			require.True(t, ok)
+			require.Equal(t, "foo", res.Topics[0].Name)
+			require.Equal(t, kafka.None, res.Topics[0].Partitions[0].ErrorCode, res.Topics[0].Partitions[0].ErrorMessage)
+			require.Equal(t, int64(0), res.Topics[0].Partitions[0].BaseOffset)
+			require.Equal(t, "", res.Topics[0].Partitions[0].ErrorMessage)
+
+			require.Equal(t, 0, len(hook.Entries))
+
+			logs := sm.GetEvents(events.NewTraits().WithNamespace("kafka").WithName("test").With("topic", "foo"))
+			require.Len(t, logs, 1)
+			require.Equal(t, `"foo-1"`, string(logs[0].Data.(*store.KafkaLog).Key.Binary))
+			require.Equal(t, "4", string(logs[0].Data.(*store.KafkaLog).Message.Binary))
+			require.Equal(t, int64(0), logs[0].Data.(*store.KafkaLog).Offset)
+			require.Equal(t, int64(1), logs[0].Data.(*store.KafkaLog).ProducerId)
+			require.Equal(t, int16(0), logs[0].Data.(*store.KafkaLog).ProducerEpoch)
+			require.Equal(t, int32(0), logs[0].Data.(*store.KafkaLog).SequenceNumber)
+		},
+	},
+	{
+		"using producer Id but invalid sequence number, too high",
+		func(t *testing.T, s *store.Store, sm *events.StoreManager) {
+			ch := asyncapi3test.NewChannel(
+				asyncapi3test.WithMessage("foo",
+					asyncapi3test.WithContentType("application/json"),
+					asyncapi3test.WithPayload(schematest.New("integer"))),
+			)
+			s.Update(asyncapi3test.NewConfig(
+				asyncapi3test.AddChannel("foo", ch),
+				asyncapi3test.WithOperation("foo",
+					asyncapi3test.WithOperationAction("send"),
+					asyncapi3test.WithOperationChannel(ch),
+				),
+			))
+			hook := test.NewGlobal()
+			ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424")
+
+			rr := kafkatest.NewRecorder()
+			s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &initProducerId.Request{}).WithContext(ctx))
+			producerState := rr.Message.(*initProducerId.Response)
+			rr = kafkatest.NewRecorder()
+			s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &produce.Request{
+				Topics: []produce.RequestTopic{
+					{Name: "foo", Partitions: []produce.RequestPartition{
+						{
+							Index: 0,
+							Record: kafka.RecordBatch{
+								Records: []*kafka.Record{
+									{
+										Offset:         0,
+										Time:           time.Now(),
+										Key:            kafka.NewBytes([]byte(`"foo-1"`)),
+										Value:          kafka.NewBytes([]byte(`4`)),
+										Headers:        nil,
+										ProducerId:     producerState.ProducerId,
+										ProducerEpoch:  producerState.ProducerEpoch,
+										SequenceNumber: 10,
+									},
+								},
+							},
+						},
+					},
+				}},
+			}).WithContext(ctx))
+
+			res, ok := rr.Message.(*produce.Response)
+			require.True(t, ok)
+			require.Equal(t, "foo", res.Topics[0].Name)
+			require.Equal(t, kafka.OutOfOrderSequenceNumber, res.Topics[0].Partitions[0].ErrorCode, res.Topics[0].Partitions[0].ErrorMessage)
+			require.Equal(t, int64(0), res.Topics[0].Partitions[0].BaseOffset)
+			require.Equal(t, "expected sequence number 0 but got 10", res.Topics[0].Partitions[0].ErrorMessage)
+			require.Equal(t, "expected sequence number 0 but got 10", res.Topics[0].Partitions[0].RecordErrors[0].BatchIndexErrorMessage)
+
+			require.Equal(t, 1, len(hook.Entries))
+			require.Equal(t, "kafka: failed to write to topic 'foo' partition 0: expected sequence number 0 but got 10", hook.LastEntry().Message)
+
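These tests drive the handshake an idempotent producer performs: request a producer id once, then stamp every record with (id, epoch, increasing sequence number). A rough client-side sketch with illustrative types, not mokapi's API:

    package sketch

    // stampedRecord carries the idempotence metadata each produce request
    // attaches per record.
    type stampedRecord struct {
        producerID    int64
        producerEpoch int16
        sequence      int32
        value         []byte
    }

    // stampRecords assigns consecutive sequence numbers starting at 0; the
    // broker rejects gaps (OutOfOrderSequenceNumber) and repeats
    // (DuplicateSequenceNumber).
    func stampRecords(values [][]byte, id int64, epoch int16) []stampedRecord {
        recs := make([]stampedRecord, 0, len(values))
        for i, v := range values {
            recs = append(recs, stampedRecord{
                producerID:    id,
                producerEpoch: epoch,
                sequence:      int32(i),
                value:         v,
            })
        }
        return recs
    }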
}, + }, + { + "call produce twice for same sequence number", + func(t *testing.T, s *store.Store, sm *events.StoreManager) { + ch := asyncapi3test.NewChannel( + asyncapi3test.WithMessage("foo", + asyncapi3test.WithContentType("application/json"), + asyncapi3test.WithPayload(schematest.New("integer"))), + ) + s.Update(asyncapi3test.NewConfig( + asyncapi3test.AddChannel("foo", ch), + asyncapi3test.WithOperation("foo", + asyncapi3test.WithOperationAction("send"), + asyncapi3test.WithOperationChannel(ch), + ), + )) + hook := test.NewGlobal() + ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424") + + rr := kafkatest.NewRecorder() + s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &initProducerId.Request{}).WithContext(ctx)) + producerState := rr.Message.(*initProducerId.Response) + rr = kafkatest.NewRecorder() + r := kafkatest.NewRequest("MOKAPITEST1", 3, &produce.Request{ + Topics: []produce.RequestTopic{ + {Name: "foo", Partitions: []produce.RequestPartition{ + { + Record: kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Offset: 0, + Time: time.Now(), + Key: kafka.NewBytes([]byte(`"foo-1"`)), + Value: kafka.NewBytes([]byte(`4`)), + ProducerId: producerState.ProducerId, + ProducerEpoch: producerState.ProducerEpoch, + SequenceNumber: 0, + }, + }, + }, + }, + }, + }}, + }).WithContext(ctx) + s.ServeMessage(rr, r) + s.ServeMessage(rr, r) + + res, ok := rr.Message.(*produce.Response) + require.True(t, ok) + require.Equal(t, "foo", res.Topics[0].Name) + require.Equal(t, kafka.DuplicateSequenceNumber, res.Topics[0].Partitions[0].ErrorCode, res.Topics[0].Partitions[0].ErrorMessage) + require.Equal(t, int64(0), res.Topics[0].Partitions[0].BaseOffset) + require.Equal(t, "message sequence number already received: 0", res.Topics[0].Partitions[0].ErrorMessage) + require.Equal(t, "message sequence number already received: 0", res.Topics[0].Partitions[0].RecordErrors[0].BatchIndexErrorMessage) + + require.Equal(t, 1, len(hook.Entries)) + require.Equal(t, "kafka: failed to write to topic 'foo' partition 0: message sequence number already received: 0", hook.LastEntry().Message) + }, + }, } for _, tc := range testcases { diff --git a/providers/asyncapi3/kafka/store/store.go b/providers/asyncapi3/kafka/store/store.go index d8ab3e38c..61af430cf 100644 --- a/providers/asyncapi3/kafka/store/store.go +++ b/providers/asyncapi3/kafka/store/store.go @@ -9,6 +9,7 @@ import ( "mokapi/kafka/fetch" "mokapi/kafka/findCoordinator" "mokapi/kafka/heartbeat" + "mokapi/kafka/initProducerId" "mokapi/kafka/joinGroup" "mokapi/kafka/listgroup" "mokapi/kafka/metaData" @@ -37,8 +38,15 @@ type Store struct { cluster string eventEmitter common.EventEmitter eh events.Handler + producers map[int64]*ProducerState - m sync.RWMutex + nextPID int64 + m sync.RWMutex +} + +type ProducerState struct { + ProducerId int64 + ProducerEpoch int16 } func NewEmpty(eventEmitter common.EventEmitter, eh events.Handler) *Store { @@ -48,6 +56,7 @@ func NewEmpty(eventEmitter common.EventEmitter, eh events.Handler) *Store { groups: make(map[string]*Group), eventEmitter: eventEmitter, eh: eh, + producers: make(map[int64]*ProducerState), } } @@ -219,6 +228,8 @@ func (s *Store) ServeMessage(rw kafka.ResponseWriter, req *kafka.Request) { err = s.apiversion(rw, req) case *createTopics.Request: err = s.createtopics(rw, req) + case *initProducerId.Request: + err = s.initProducerID(rw, req) default: err = fmt.Errorf("kafka: unsupported api key: %v", req.Header.ApiKey) } @@ -333,8 +344,12 @@ func (s *Store) trigger(record *kafka.Record, 
schemaId int) bool { return false } - _ = record.Key.Close() - _ = record.Value.Close() + if record.Key != nil { + _ = record.Key.Close() + } + if record.Value != nil { + _ = record.Value.Close() + } record.Key = kafka.NewBytes([]byte(r.Key)) record.Value = kafka.NewBytes([]byte(r.Value)) diff --git a/providers/asyncapi3/kafka/store/store_test.go b/providers/asyncapi3/kafka/store/store_test.go index d8986e920..7445b82ec 100644 --- a/providers/asyncapi3/kafka/store/store_test.go +++ b/providers/asyncapi3/kafka/store/store_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/providers/asyncapi3" @@ -10,6 +9,8 @@ import ( "mokapi/runtime/events" "mokapi/schema/json/schema/schematest" "testing" + + "github.com/stretchr/testify/require" ) func TestStore(t *testing.T) { @@ -123,14 +124,15 @@ func TestStore(t *testing.T) { ), )) - _, _, err := s.Topic("foo").Partitions[0].Write( + wr, err := s.Topic("foo").Partitions[0].Write( kafka.RecordBatch{Records: []*kafka.Record{ { Value: kafka.NewBytes([]byte("123")), }, }}, ) - require.EqualError(t, err, "validation error: invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number") + require.NoError(t, err) + require.Equal(t, "invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number", wr.Records[0].BatchIndexErrorMessage) }, }, } diff --git a/providers/asyncapi3/kafka/store/topic.go b/providers/asyncapi3/kafka/store/topic.go index 99934e6d0..05bbaeeea 100644 --- a/providers/asyncapi3/kafka/store/topic.go +++ b/providers/asyncapi3/kafka/store/topic.go @@ -84,20 +84,20 @@ func (t *Topic) Store() *Store { func (t *Topic) Write(record *kafka.Record) (partition int, recordError *produce.RecordError) { r := rand.New(rand.NewSource(time.Now().Unix())) index := r.Intn(len(t.Partitions)) - _, errs, _ := t.Partitions[index].Write(kafka.RecordBatch{Records: []*kafka.Record{record}}) - if errs == nil { + wr, _ := t.Partitions[index].Write(kafka.RecordBatch{Records: []*kafka.Record{record}}) + if wr.Records == nil { return index, nil } - return index, &errs[0] + return index, &wr.Records[0] } func (t *Topic) WritePartition(partition int, record *kafka.Record) (recordError *produce.RecordError, err error) { if partition >= len(t.Partitions) { return nil, fmt.Errorf("partition out of range") } - _, errs, _ := t.Partitions[partition].Write(kafka.RecordBatch{Records: []*kafka.Record{record}}) - if errs == nil { + wr, _ := t.Partitions[partition].Write(kafka.RecordBatch{Records: []*kafka.Record{record}}) + if wr.Records == nil { return nil, nil } - return &errs[0], nil + return &wr.Records[0], nil } diff --git a/providers/asyncapi3/kafka/store/validation.go b/providers/asyncapi3/kafka/store/validation.go index 147fc369c..8367f1b2d 100644 --- a/providers/asyncapi3/kafka/store/validation.go +++ b/providers/asyncapi3/kafka/store/validation.go @@ -43,11 +43,7 @@ func newValidator(c *asyncapi3.Channel) *validator { func (v *validator) Validate(record *kafka.Record) (l *KafkaLog, err error) { if v == nil { - return &KafkaLog{ - Key: LogValue{Binary: kafka.Read(record.Key)}, - Message: LogValue{Binary: kafka.Read(record.Value)}, - Headers: convertHeader(record.Headers), - }, nil + return newKafkaLog(record), nil } for _, val := range v.validators { @@ -56,11 +52,7 @@ func (v *validator) Validate(record *kafka.Record) (l *KafkaLog, err error) { return } } - return &KafkaLog{ - Key: LogValue{Binary: kafka.Read(record.Key)}, - 
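Across the patch, the three producer fields travel from the wire into the event log: decoded once per batch header in record_v2.go (each record's number being baseSequence plus its index), stamped onto every kafka.Record, then copied into KafkaLog by newKafkaLog and by the message validator below. Reduced to its core (types trimmed to the fields in question; json tags copied from the diff):

    package sketch

    // Record holds the fields decoded from the v2 record batch header.
    type Record struct {
        ProducerId     int64
        ProducerEpoch  int16
        SequenceNumber int32
    }

    // KafkaLog is the event-log view; with these tags the events API now
    // exposes producerId, producerEpoch and sequenceNumber per event.
    type KafkaLog struct {
        ProducerId     int64 `json:"producerId"`
        ProducerEpoch  int16 `json:"producerEpoch"`
        SequenceNumber int32 `json:"sequenceNumber"`
    }

    func newLog(r *Record) *KafkaLog {
        return &KafkaLog{
            ProducerId:     r.ProducerId,
            ProducerEpoch:  r.ProducerEpoch,
            SequenceNumber: r.SequenceNumber,
        }
    }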
diff --git a/providers/asyncapi3/kafka/store/validation.go b/providers/asyncapi3/kafka/store/validation.go
index 147fc369c..8367f1b2d 100644
--- a/providers/asyncapi3/kafka/store/validation.go
+++ b/providers/asyncapi3/kafka/store/validation.go
@@ -43,11 +43,7 @@ func newValidator(c *asyncapi3.Channel) *validator {
 
 func (v *validator) Validate(record *kafka.Record) (l *KafkaLog, err error) {
 	if v == nil {
-		return &KafkaLog{
-			Key:     LogValue{Binary: kafka.Read(record.Key)},
-			Message: LogValue{Binary: kafka.Read(record.Value)},
-			Headers: convertHeader(record.Headers),
-		}, nil
+		return newKafkaLog(record), nil
 	}
 
 	for _, val := range v.validators {
@@ -56,11 +52,7 @@ func (v *validator) Validate(record *kafka.Record) (l *KafkaLog, err error) {
 			return
 		}
 	}
-	return &KafkaLog{
-		Key:     LogValue{Binary: kafka.Read(record.Key)},
-		Message: LogValue{Binary: kafka.Read(record.Value)},
-		Headers: convertHeader(record.Headers),
-	}, err
+	return newKafkaLog(record), err
 }
 
 type messageValidator struct {
@@ -142,7 +134,15 @@ func newMessageValidator(messageId string, msg *asyncapi3.Message, channel *asyn
 }
 
 func (mv *messageValidator) Validate(record *kafka.Record) (*KafkaLog, error) {
-	r := &KafkaLog{Key: LogValue{}, Message: LogValue{}, Headers: make(map[string]LogValue), MessageId: mv.messageId}
+	r := &KafkaLog{
+		Key:            LogValue{},
+		Message:        LogValue{},
+		Headers:        make(map[string]LogValue),
+		MessageId:      mv.messageId,
+		ProducerId:     record.ProducerId,
+		ProducerEpoch:  record.ProducerEpoch,
+		SequenceNumber: record.SequenceNumber,
+	}
 
 	if mv.msg != nil && mv.msg.Bindings.Kafka.SchemaIdLocation == "payload" {
 		var err error
diff --git a/providers/asyncapi3/kafka/store/validation_test.go b/providers/asyncapi3/kafka/store/validation_test.go
index 337873759..056d4015d 100644
--- a/providers/asyncapi3/kafka/store/validation_test.go
+++ b/providers/asyncapi3/kafka/store/validation_test.go
@@ -37,7 +37,7 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, recordErrors, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Key: kafka.NewBytes([]byte("key-foo")),
@@ -50,7 +50,7 @@ func TestValidation(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, recordErrors, 0)
+				require.Len(t, wr.Records, 0)
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 2)
 				// latest message is first
@@ -94,15 +94,16 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Value: kafka.NewBytes([]byte("123")),
 						},
 					},
 				})
-				require.EqualError(t, err, "validation error: invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number")
-				require.Len(t, batch, 1)
+				require.NoError(t, err)
+				require.Equal(t, "invalid message: error count 1:\n\t- #/type: invalid type, expected string but got number", wr.Records[0].BatchIndexErrorMessage)
+				require.Len(t, wr.Records, 1)
 			},
 		},
 		{
@@ -120,7 +121,7 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Key: kafka.NewBytes([]byte("foo")),
@@ -128,7 +129,7 @@ func TestValidation(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -152,7 +153,7 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Key: kafka.NewBytes([]byte("12")),
@@ -161,7 +162,7 @@ func TestValidation(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -185,7 +186,7 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Value: kafka.NewBytes([]byte{0, 0, 0, 0, 1, '"', 'f', 'o', 'o', '"'}),
@@ -193,7 +194,7 @@ func TestValidation(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -212,15 +213,16 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Headers: []kafka.RecordHeader{{Key: "foo", Value: []byte{64}}},
 						},
 					},
 				})
-				require.EqualError(t, err, "validation error: invalid key: error count 1:\n\t- #/minimum: integer 64 is less than minimum value of 100")
-				require.Len(t, batch, 1)
+				require.NoError(t, err)
+				require.Equal(t, "invalid key: error count 1:\n\t- #/minimum: integer 64 is less than minimum value of 100", wr.Records[0].BatchIndexErrorMessage)
+				require.Len(t, wr.Records, 1)
 			},
 		},
 		{
@@ -239,7 +241,7 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Value: kafka.NewBytes([]byte("123")),
@@ -247,7 +249,7 @@ func TestValidation(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -273,14 +275,15 @@ func TestValidation(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, _, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Value: kafka.NewBytes([]byte("")),
 						},
 					},
 				})
-				require.EqualError(t, err, "validation error: invalid message: error count 1:\n\t- #/required: required properties are missing: bar")
+				require.NoError(t, err)
+				require.Equal(t, "invalid message: error count 1:\n\t- #/required: required properties are missing: bar", wr.Records[0].BatchIndexErrorMessage)
 			},
 		},
 	}
@@ -314,7 +317,7 @@ func TestValidation_Header(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Headers: []kafka.RecordHeader{{Key: "foo", Value: []byte{1, 0, 0, 0}}},
@@ -322,7 +325,7 @@ func TestValidation_Header(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -341,7 +344,7 @@ func TestValidation_Header(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Headers: []kafka.RecordHeader{{Key: "foo", Value: []byte{1, 0, 0, 0}}},
@@ -349,7 +352,7 @@ func TestValidation_Header(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -365,7 +368,7 @@ func TestValidation_Header(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Headers: []kafka.RecordHeader{{Key: "foo", Value: []byte{1, 0, 0, 0}}},
@@ -373,7 +376,7 @@ func TestValidation_Header(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
@@ -392,7 +395,7 @@ func TestValidation_Header(t *testing.T) {
 			),
 			test: func(t *testing.T, s *store.Store, sm *events.StoreManager) {
 				p := s.Topic("foo").Partition(0)
-				_, batch, err := p.Write(kafka.RecordBatch{
+				wr, err := p.Write(kafka.RecordBatch{
 					Records: []*kafka.Record{
 						{
 							Headers: []kafka.RecordHeader{{Key: "foo", Value: []byte{119, 16, 73, 64}}},
@@ -400,7 +403,7 @@ func TestValidation_Header(t *testing.T) {
 					},
 				})
 				require.NoError(t, err)
-				require.Len(t, batch, 0)
+				require.Len(t, wr.Records, 0)
 
 				e := sm.GetEvents(events.NewTraits())
 				require.Len(t, e, 1)
diff --git a/providers/asyncapi3/schema.go b/providers/asyncapi3/schema.go
index 6725ec870..d863891ad 100644
--- a/providers/asyncapi3/schema.go
+++ b/providers/asyncapi3/schema.go
@@ -116,14 +116,27 @@ func (m *MultiSchemaFormat) parse(config *dynamic.Config, reader dynamic.Reader)
 	return nil
 }
 
-func (m *MultiSchemaFormat) Resolve(token string) (interface{}, error) {
-	if token == "" {
-		if js, ok := m.Schema.(*jsonSchema.Schema); ok {
-			return js, nil
+func (m *MultiSchemaFormat) ConvertTo(i interface{}) (interface{}, error) {
+	switch i.(type) {
+	case *jsonSchema.Schema:
+		switch s := m.Schema.(type) {
+		case *jsonSchema.Schema:
+			return m.Schema, nil
+		case *openapi.Schema:
+			return openapi.ConvertToJsonSchema(s), nil
+		case *avro.Schema:
+			return avro.ConvertToJsonSchema(s), nil
+		}
+	case *openapi.Schema:
+		if _, ok := m.Schema.(*openapi.Schema); ok {
+			return m.Schema, nil
+		}
+	case *avro.Schema:
+		if _, ok := m.Schema.(*avro.Schema); ok {
+			return m.Schema, nil
 		}
-		return m.Schema, nil
 	}
-	return m, nil
+	return nil, fmt.Errorf("unsupported schema conversion from %T to %T", m.Schema, i)
 }
 
 func (m *MultiSchemaFormat) UnmarshalJSON(b []byte) error {
diff --git a/providers/openapi/schema/marshal_xml.go b/providers/openapi/schema/marshal_xml.go
index 514ca1f2f..a2bd1c7ca 100644
--- a/providers/openapi/schema/marshal_xml.go
+++ b/providers/openapi/schema/marshal_xml.go
@@ -34,7 +34,9 @@ func marshalXml(i interface{}, r *Schema) ([]byte, error) {
 		}
 	}
 	if name == "" {
-		return nil, fmt.Errorf("root element name is undefined: reference name of schema, attribute xml.name and $id is empty")
+		// If no root element name is defined, fall back to a default: for generic tooling the root
+		// name rarely matters, so using a default improves the user experience over failing with an error.
+		name = "data"
 	}
 
 	if i == nil {
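Note: Resolve is replaced by ConvertTo above, which converts the wrapped schema toward the type of its argument. A hedged usage sketch (the typed nil argument only selects the target type; `m` is assumed to be a *MultiSchemaFormat):

    v, err := m.ConvertTo((*jsonSchema.Schema)(nil))
    if err != nil {
        return err // e.g. unsupported schema conversion
    }
    js := v.(*jsonSchema.Schema) // openapi and avro schemas are converted on the fly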
diff --git a/providers/openapi/schema/marshal_xml_test.go b/providers/openapi/schema/marshal_xml_test.go
index f56832c47..7f6b84592 100644
--- a/providers/openapi/schema/marshal_xml_test.go
+++ b/providers/openapi/schema/marshal_xml_test.go
@@ -1,13 +1,14 @@
 package schema_test
 
 import (
-	"github.com/stretchr/testify/require"
 	"mokapi/media"
 	"mokapi/providers/openapi/schema"
 	"mokapi/providers/openapi/schema/schematest"
 	"mokapi/sortedmap"
 	"strings"
 	"testing"
+
+	"github.com/stretchr/testify/require"
 )
 
 func TestMarshal_Xml(t *testing.T) {
@@ -34,7 +35,8 @@ func TestMarshal_Xml(t *testing.T) {
 			},
 			schema: schematest.New("integer"),
 			test: func(t *testing.T, s string, err error) {
-				require.EqualError(t, err, "encoding data to 'application/xml' failed: root element name is undefined: reference name of schema, attribute xml.name and $id is empty")
+				require.NoError(t, err)
+				require.Equal(t, "4", s)
 			},
 		},
 		{
diff --git a/providers/openapi/schema/parse_xml.go b/providers/openapi/schema/parse_xml.go
index 30de2dbf6..c8cfd555d 100644
--- a/providers/openapi/schema/parse_xml.go
+++ b/providers/openapi/schema/parse_xml.go
@@ -44,7 +44,12 @@ func (p *XmlParser) Parse(v any) (any, error) {
 		return nil, fmt.Errorf("failed to parse XML: %w", err)
 	}
 
-	pn := parser.Parser{Schema: ConvertToJsonSchema(p.s), ConvertStringToNumber: true, ConvertStringToBoolean: true}
+	pn := parser.Parser{
+		Schema:                       ConvertToJsonSchema(p.s),
+		ConvertStringToNumber:        true,
+		ConvertStringToBoolean:       true,
+		ValidateAdditionalProperties: true,
+	}
 	return pn.Parse(data)
 }
 
@@ -104,32 +109,34 @@ func parseXML(n *node, s *Schema) (any, error) {
 
 	// elements can override attribute values
 	for _, attr := range n.Attrs {
-		name, prop := getProperty(attr.Name, s, true)
-		if prop != nil || s.IsFreeForm() {
-			v, err := parseValue(attr.Value, prop)
-			if err != nil {
-				return nil, err
-			}
-			m[name] = v
+		name, prop := getProperty(attr.Name, s)
+		if prop != nil && prop.Xml != nil && !prop.Xml.Attribute {
+			continue
 		}
+		v, err := parseValue(attr.Value, prop)
+		if err != nil {
+			return nil, err
+		}
+		m[name] = v
 	}
 
 	for _, child := range n.Nodes {
-		name, prop := getProperty(child.XMLName, s, false)
-		if prop != nil || s.IsFreeForm() {
-			v, err := parseXML(&child, prop)
-			if err != nil {
-				return nil, err
-			}
-			if _, ok := m[name]; ok {
-				if arr, isArray := m[name].([]any); isArray {
-					m[name] = append(arr, v)
-				} else {
-					m[name] = []interface{}{m[name], v}
-				}
+		name, prop := getProperty(child.XMLName, s)
+		if prop != nil && prop.Xml != nil && prop.Xml.Attribute {
+			continue
+		}
+		v, err := parseXML(&child, prop)
+		if err != nil {
+			return nil, err
+		}
+		if _, ok := m[name]; ok {
+			if arr, isArray := m[name].([]any); isArray {
+				m[name] = append(arr, v)
 			} else {
-				m[name] = v
+				m[name] = []interface{}{m[name], v}
 			}
+		} else {
+			m[name] = v
 		}
 	}
 
@@ -182,26 +189,13 @@ func parseValue(s string, ref *Schema) (interface{}, error) {
 	return nil, fmt.Errorf("unknown type: %v", ref.Type)
 }
 
-func getProperty(name xml.Name, s *Schema, asAttr bool) (string, *Schema) {
+func getProperty(name xml.Name, s *Schema) (string, *Schema) {
 	if s == nil || !s.HasProperties() {
 		return name.Local, nil
 	}
 
-	prop := s.Properties.Get(name.Local)
-	if prop != nil {
-		if prop.Xml != nil {
-			x := prop.Xml
-			if len(x.Prefix) > 0 && x.Prefix == name.Space {
-				return name.Local, prop
-			}
-		} else {
-			return name.Local, prop
-		}
-
-	}
-
 	for it := s.Properties.Iter(); it.Next(); {
-		prop = it.Value()
+		prop := it.Value()
 		if prop == nil {
 			continue
 		}
@@ -209,14 +203,24 @@ func getProperty(name xml.Name, s *Schema, asAttr bool) (string, *Schema) {
 		if x == nil {
 			continue
 		}
-		if asAttr != x.Attribute {
-			continue
-		}
-		if x.Name == name.Local && x.Attribute == asAttr {
+		if x.Name == name.Local {
 			return it.Key(), prop
 		}
 	}
 
+	prop := s.Properties.Get(name.Local)
+	if prop != nil {
+		if prop.Xml != nil {
+			x := prop.Xml
+			if x.Prefix == name.Space {
+				return name.Local, prop
+			}
+		} else {
+			return name.Local, prop
+		}
+
+	}
+
 	return name.Local, nil
 }
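Note: getProperty above no longer takes an asAttr flag; instead parseXML skips a schema property whose xml.attribute setting contradicts where the value appears in the document. A rough sketch of the schema side (schematest helpers as used in the tests; the expected parsed value is an assumption):

    // property explicitly declared as an XML attribute
    s := schematest.New("object",
        schematest.WithProperty("id", schematest.New("integer",
            schematest.WithXml(&schema.Xml{Attribute: true}))))
    // <root id="123"/> should then map id from the attribute, e.g. map[string]any{"id": int64(123)}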
diff --git a/providers/openapi/schema/parse_xml_test.go b/providers/openapi/schema/parse_xml_test.go
index 32300ab67..b1873820c 100644
--- a/providers/openapi/schema/parse_xml_test.go
+++ b/providers/openapi/schema/parse_xml_test.go
@@ -27,7 +27,8 @@ func TestParseXML(t *testing.T) {
 			),
 			test: func(t *testing.T, v any, err error) {
 				require.NoError(t, err)
-				require.Equal(t, map[string]any{"id": int64(123)}, v)
+				// id is defined and shouldn't be treated as an additional property
+				require.Equal(t, map[string]any{}, v)
 			},
 		},
 		{
@@ -394,13 +395,12 @@ func TestUnmarshalXML_Old(t *testing.T) {
 		{
 			name: "wrapped array",
 			xml:  "onetwo",
-			schema: schematest.New("object", schematest.WithProperty("books",
-				schematest.New("array", schematest.WithItems("string"), schematest.WithXml(&schema.Xml{
-					Wrapped: true,
-				})))),
+			schema: schematest.New("array", schematest.WithItems("string"), schematest.WithXml(&schema.Xml{
+				Wrapped: true,
+			})),
 			test: func(t *testing.T, i interface{}, err error) {
 				require.NoError(t, err)
-				require.Equal(t, map[string]interface{}{"books": []interface{}{"one", "two"}}, i)
+				require.Equal(t, []interface{}{"one", "two"}, i)
 			},
 		},
 		{
diff --git a/schema/encoding/encoder_xml.go b/schema/encoding/encoder_xml.go
index a19261bbb..1be9cd55e 100644
--- a/schema/encoding/encoder_xml.go
+++ b/schema/encoding/encoder_xml.go
@@ -6,6 +6,7 @@ import (
 	"encoding/xml"
 	"fmt"
 	"mokapi/schema/json/schema"
+	"mokapi/sortedmap"
 	"net/url"
 	"strings"
 )
@@ -66,6 +67,13 @@ func (x *XmlEncoder) writeXmlElement(name string, data any) error {
 				return err
 			}
 		}
+	case *sortedmap.LinkedHashMap[string, any]:
+		for it := t.Iter(); it.Next(); {
+			err = x.writeXmlElement(it.Key(), it.Value())
+			if err != nil {
+				return err
+			}
+		}
 	default:
 		_, err = x.w.WriteString(fmt.Sprintf("%v", data))
 		if err != nil {
diff --git a/webui/e2e/Dashboard/kafka/cluster.spec.ts b/webui/e2e/Dashboard/kafka/cluster.spec.ts
index da0b42a8a..cc3d44056 100644
--- a/webui/e2e/Dashboard/kafka/cluster.spec.ts
+++ b/webui/e2e/Dashboard/kafka/cluster.spec.ts
@@ -60,7 +60,7 @@ test('Visit Kafka cluster "Kafka World"', async ({ page }) => {
     await expect(config.getCellByName('Last Update')).toHaveText(formatDateTime('2023-02-15T08:49:25.482366+01:00'))
   })
 
-  await useKafkaMessages().test(page.getByRole('region', { name: "Recent Messages" }).getByRole('table', { name: 'Cluster Messages' }))
+  await useKafkaMessages(page).test(page.getByRole('region', { name: "Recent Messages" }).getByRole('table', { name: 'Cluster Messages' }))
 })
 
 test('Visit Kafka cluster config file', async ({ page, context }) => {
diff --git a/webui/e2e/Dashboard/kafka/topic.order.spec.ts b/webui/e2e/Dashboard/kafka/topic.order.spec.ts
index 24087b46d..620b951ba 100644
--- a/webui/e2e/Dashboard/kafka/topic.order.spec.ts
+++ b/webui/e2e/Dashboard/kafka/topic.order.spec.ts
@@ -31,7 +31,7 @@ test('Visit Kafka topic mokapi.shop.products', async ({ page, context }) => {
     await expect(info.getByLabel('Description')).toHaveText(topic.description)
   })
 
-  await useKafkaMessages().test(page.getByRole('table', { name: 'Topic Messages' }), false)
+  await useKafkaMessages(page).test(page.getByRole('table', { name: 'Topic Messages' }), false)
 
   const tabList = page.getByRole('region', { name: 'Topic Data' }).getByRole('tablist')
   await test.step('Check partition"', async () => {
diff --git a/webui/e2e/components/kafka.ts b/webui/e2e/components/kafka.ts
index df00bff64..e34d3ff99 100644
--- a/webui/e2e/components/kafka.ts
+++ b/webui/e2e/components/kafka.ts
@@ -1,5 +1,5 @@
-import { Locator, expect, test } from "playwright/test"
-import { useTable } from '../components/table'
+import { Locator, expect, test, type Page } from "playwright/test"
+import { useTable } from './table'
 import { formatDateTime } from "../helpers/format"
 
 export interface Topic {
@@ -101,7 +101,7 @@ export function useKafkaPartitions(table: Locator) {
   }
 }
 
-export function useKafkaMessages() {
+export function useKafkaMessages(page: Page) {
   return {
     test: async (table: Locator, withTopic: boolean = true) => {
       await test.step('Check messages log', async () => {
@@ -118,6 +118,21 @@ export function useKafkaMessages() {
           await expect(message.getCellByName('Topic')).toHaveText('mokapi.shop.products')
         }
         await expect(message.getCellByName('Time')).toHaveText(formatDateTime('2023-02-13T09:49:25.482366+01:00'))
+
+        await message.click()
+        await expect(page.getByLabel('Kafka Key', { exact: true })).toHaveText('GGOEWXXX0827')
+        await expect(page.getByLabel('Kafka Topic', { exact: true })).toHaveText('mokapi.shop.products')
+        await expect(page.getByLabel('Service Type', { exact: true })).toHaveText('KAFKA')
+        await expect(page.getByLabel('Offset', { exact: true })).toHaveText('0')
+        await expect(page.getByLabel('Partition', { exact: true })).toHaveText('0')
+        await expect(page.getByText('Producer Id', { exact: true })).not.toBeVisible({ timeout: 100 })
+        await expect(page.getByText('Producer Epoch', { exact: true })).not.toBeVisible({ timeout: 100 })
+        await expect(page.getByText('Sequence Number', { exact: true })).not.toBeVisible({ timeout: 100 })
+        await expect(page.getByLabel('Content Type', { exact: true })).toHaveText('application/json')
+        await expect(page.getByLabel('Key Type', { exact: true })).toHaveText('string')
+        await expect(page.getByLabel('Time', { exact: true })).toHaveText(formatDateTime('2023-02-13T09:49:25.482366+01:00'))
+
+        await page.goBack()
 
         message = messages.getRow(2)
         await expect(message.getCellByName('Key')).toHaveText('GGOEWXXX0828')
@@ -126,6 +141,21 @@ export function useKafkaMessages() {
           await expect(message.getCellByName('Topic')).toHaveText('mokapi.shop.products')
         }
         await expect(message.getCellByName('Time')).toHaveText(formatDateTime('2023-02-13T09:49:25.482366+01:00'))
+
+        await message.click()
+        await expect(page.getByLabel('Kafka Key', { exact: true })).toHaveText('GGOEWXXX0828')
+        await expect(page.getByLabel('Kafka Topic', { exact: true })).toHaveText('mokapi.shop.products')
+        await expect(page.getByLabel('Service Type', { exact: true })).toHaveText('KAFKA')
+        await expect(page.getByLabel('Offset', { exact: true })).toHaveText('1')
+        await expect(page.getByLabel('Partition', { exact: true })).toHaveText('1')
+        await expect(page.getByLabel('Producer Id', { exact: true })).toHaveText('3')
+        await expect(page.getByLabel('Producer Epoch', { exact: true })).toHaveText('1')
+        await expect(page.getByLabel('Sequence Number', { exact: true })).toHaveText('1')
+        await expect(page.getByLabel('Content Type', { exact: true })).toHaveText('application/json')
+        await expect(page.getByLabel('Key Type', { exact: true })).toHaveText('string')
+        await expect(page.getByLabel('Time', { exact: true })).toHaveText(formatDateTime('2023-02-13T09:49:25.482366+01:00'))
+
+        await page.goBack()
       })
     }
   }
diff --git a/webui/package-lock.json b/webui/package-lock.json
index 06deb32f0..09c4b385f 100644
--- a/webui/package-lock.json
+++ b/webui/package-lock.json
@@ -9,7 +9,7 @@
       "version": "0.10.0",
       "dependencies": {
         "@popperjs/core": "^2.11.6",
-        "@ssthouse/vue3-tree-chart": "^0.2.6",
+        "@ssthouse/vue3-tree-chart": "^0.3.0",
         "@types/bootstrap": "^5.2.10",
         "@types/whatwg-mimetype": "^3.0.2",
         "ace-builds": "^1.43.4",
@@ -21,7 +21,7 @@
         "http-status-codes": "^2.3.0",
         "js-yaml": "^4.1.1",
         "ncp": "^2.0.0",
-        "vue": "^3.5.24",
+        "vue": "^3.5.25",
        "vue-router": "^4.6.3",
         "vue3-ace-editor": "^2.2.4",
         "vue3-highlightjs": "^1.0.5",
@@ -30,7 +30,7 @@
         "xml-formatter": "^3.6.7"
       },
       "devDependencies": {
-        "@playwright/test": "^1.56.1",
+        "@playwright/test": "^1.57.0",
         "@rushstack/eslint-patch": "^1.15.0",
         "@types/js-yaml": "^4.0.9",
         "@types/node": "^24.10.1",
@@ -39,12 +39,12 @@
         "@vue/eslint-config-typescript": "^14.6.0",
         "@vue/tsconfig": "^0.8.1",
         "eslint": "^9.39.1",
-        "eslint-plugin-vue": "^10.5.1",
+        "eslint-plugin-vue": "^10.6.2",
         "npm-run-all": "^4.1.5",
         "prettier": "^3.6.2",
         "typescript": "~5.9.3",
         "vite": "^7.2.4",
-        "vue-tsc": "^3.1.4",
+        "vue-tsc": "^3.1.5",
         "xml2js": "^0.6.2"
       }
     },
@@ -776,13 +776,13 @@
       }
    },
    "node_modules/@playwright/test": {
-      "version": "1.56.1",
-      "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
-      "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
+      "version": "1.57.0",
+      "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.57.0.tgz",
+      "integrity": "sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "playwright": "1.56.1"
+        "playwright": "1.57.0"
       },
       "bin": {
         "playwright": "cli.js"
       },
@@ -795,6 +795,7 @@
       "version": "2.11.8",
       "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz",
       "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==",
+      "peer": true,
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/popperjs"
       }
     },
@@ -1107,19 +1108,20 @@
       }
     },
     "node_modules/@ssthouse/tree-chart-core": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/@ssthouse/tree-chart-core/-/tree-chart-core-1.1.2.tgz",
-      "integrity": "sha512-UiPOu+K8XoS1dPPerhlpf2WZMsREBYnuTWKYcWps/oQPyLdLKbS6ZFBZXaFqm4Q91GEvf+AfD8MK8Cllw2V4kg==",
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/@ssthouse/tree-chart-core/-/tree-chart-core-1.2.0.tgz",
+      "integrity": "sha512-hnr+gDB0l9CHEEHUPm1ic1eXInFJV2wsb+e1zvttEzhkUQuyb69nNIOXTMp/OJzVy2wqyl8Niwo9QKTm1lPn0w==",
+      "license": "ISC",
       "dependencies": {
         "d3": "^7.2.0"
       }
     },
     "node_modules/@ssthouse/vue3-tree-chart": {
-      "version": "0.2.6",
-      "resolved": "https://registry.npmjs.org/@ssthouse/vue3-tree-chart/-/vue3-tree-chart-0.2.6.tgz",
-      "integrity": "sha512-ihSTPYXTri834ViIKlp4DGfZ5Kt+xwy8H5Bd9KBD2QO/S7Dath0UreF+5Dk5fd8gMDqvvI/wFg6OZxwiNf3iHA==",
+      "version": "0.3.0",
+      "resolved": "https://registry.npmjs.org/@ssthouse/vue3-tree-chart/-/vue3-tree-chart-0.3.0.tgz",
+      "integrity": "sha512-hPi0FZT45yS0E3hRYSlBypjqP0KnQR9dgpw4dF/7jsjg0XE3nLuabiZd5OkPMMgh2H1TilXE7D9BPCB94WMDXg==",
       "dependencies": {
-        "@ssthouse/tree-chart-core": "^1.1.0"
+        "@ssthouse/tree-chart-core": "^1.2.0"
       }
     },
     "node_modules/@types/bootstrap": {
@@ -1154,8 +1156,7 @@
     "node_modules/@types/linkify-it": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-3.0.2.tgz",
-      "integrity": "sha512-HZQYqbiFVWufzCwexrvh694SOim8z2d+xJl5UNamcvQFejLY/2YUtzXHYi3cHdI7PMlS8ejH2slRAOJQ32aNbA==",
-      "peer": true
+      "integrity": "sha512-HZQYqbiFVWufzCwexrvh694SOim8z2d+xJl5UNamcvQFejLY/2YUtzXHYi3cHdI7PMlS8ejH2slRAOJQ32aNbA=="
     },
     "node_modules/@types/markdown-it": {
       "version": "12.2.3",
@@ -1170,8 +1171,7 @@
     "node_modules/@types/mdurl": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.2.tgz",
-      "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==",
-      "peer": true
+      "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA=="
     },
     "node_modules/@types/node": {
       "version": "24.10.1",
@@ -1179,6 +1179,7 @@
       "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "undici-types": "~7.16.0"
       }
@@ -1298,13 +1299,13 @@
       }
     },
     "node_modules/@vue/compiler-core": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.24.tgz",
-      "integrity": "sha512-eDl5H57AOpNakGNAkFDH+y7kTqrQpJkZFXhWZQGyx/5Wh7B1uQYvcWkvZi11BDhscPgj8N7XV3oRwiPnx1Vrig==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.25.tgz",
+      "integrity": "sha512-vay5/oQJdsNHmliWoZfHPoVZZRmnSWhug0BYT34njkYTPqClh3DNWLkZNJBVSjsNMrg0CCrBfoKkjZQPM/QVUw==",
       "license": "MIT",
       "dependencies": {
         "@babel/parser": "^7.28.5",
-        "@vue/shared": "3.5.24",
+        "@vue/shared": "3.5.25",
         "entities": "^4.5.0",
         "estree-walker": "^2.0.2",
         "source-map-js": "^1.2.1"
@@ -1323,26 +1324,26 @@
       }
     },
     "node_modules/@vue/compiler-dom": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.24.tgz",
-      "integrity": "sha512-1QHGAvs53gXkWdd3ZMGYuvQFXHW4ksKWPG8HP8/2BscrbZ0brw183q2oNWjMrSWImYLHxHrx1ItBQr50I/q2zw==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.25.tgz",
+      "integrity": "sha512-4We0OAcMZsKgYoGlMjzYvaoErltdFI2/25wqanuTu+S4gismOTRTBPi4IASOjxWdzIwrYSjnqONfKvuqkXzE2Q==",
       "license": "MIT",
       "dependencies": {
-        "@vue/compiler-core": "3.5.24",
-        "@vue/shared": "3.5.24"
+        "@vue/compiler-core": "3.5.25",
+        "@vue/shared": "3.5.25"
       }
     },
     "node_modules/@vue/compiler-sfc": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.24.tgz",
-      "integrity": "sha512-8EG5YPRgmTB+YxYBM3VXy8zHD9SWHUJLIGPhDovo3Z8VOgvP+O7UP5vl0J4BBPWYD9vxtBabzW1EuEZ+Cqs14g==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.25.tgz",
+      "integrity": "sha512-PUgKp2rn8fFsI++lF2sO7gwO2d9Yj57Utr5yEsDf3GNaQcowCLKL7sf+LvVFvtJDXUp/03+dC6f2+LCv5aK1ag==",
       "license": "MIT",
       "dependencies": {
         "@babel/parser": "^7.28.5",
-        "@vue/compiler-core": "3.5.24",
-        "@vue/compiler-dom": "3.5.24",
-        "@vue/compiler-ssr": "3.5.24",
-        "@vue/shared": "3.5.24",
+        "@vue/compiler-core": "3.5.25",
+        "@vue/compiler-dom": "3.5.25",
+        "@vue/compiler-ssr": "3.5.25",
+        "@vue/shared": "3.5.25",
         "estree-walker": "^2.0.2",
         "magic-string": "^0.30.21",
         "postcss": "^8.5.6",
@@ -1350,13 +1351,13 @@
       }
     },
     "node_modules/@vue/compiler-ssr": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.24.tgz",
-      "integrity": "sha512-trOvMWNBMQ/odMRHW7Ae1CdfYx+7MuiQu62Jtu36gMLXcaoqKvAyh+P73sYG9ll+6jLB6QPovqoKGGZROzkFFg==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.25.tgz",
+      "integrity": "sha512-ritPSKLBcParnsKYi+GNtbdbrIE1mtuFEJ4U1sWeuOMlIziK5GtOL85t5RhsNy4uWIXPgk+OUdpnXiTdzn8o3A==",
       "license": "MIT",
       "dependencies": {
-        "@vue/compiler-dom": "3.5.24",
-        "@vue/shared": "3.5.24"
+        "@vue/compiler-dom": "3.5.25",
+        "@vue/shared": "3.5.25"
       }
     },
     "node_modules/@vue/devtools-api": {
@@ -1617,6 +1618,7 @@
       "integrity": "sha512-Zhy8HCvBUEfBECzIl1PKqF4p11+d0aUJS1GeUiuqK9WmOug8YCmC4h4bjyBvMyAMI9sbRczmrYL5lKg/YMbrcQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@typescript-eslint/scope-manager": "8.38.0",
         "@typescript-eslint/types": "8.38.0",
@@ -1705,9 +1707,9 @@
       }
     },
     "node_modules/@vue/language-core": {
-      "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.1.4.tgz",
-      "integrity": "sha512-n/58wm8SkmoxMWkUNUH/PwoovWe4hmdyPJU2ouldr3EPi1MLoS7iDN46je8CsP95SnVBs2axInzRglPNKvqMcg==",
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.1.5.tgz",
+      "integrity": "sha512-FMcqyzWN+sYBeqRMWPGT2QY0mUasZMVIuHvmb5NT3eeqPrbHBYtCP8JWEUCDCgM+Zr62uuWY/qoeBrPrzfa78w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1742,53 +1744,53 @@
       }
     },
     "node_modules/@vue/reactivity": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.24.tgz",
-      "integrity": "sha512-BM8kBhtlkkbnyl4q+HiF5R5BL0ycDPfihowulm02q3WYp2vxgPcJuZO866qa/0u3idbMntKEtVNuAUp5bw4teg==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.25.tgz",
+      "integrity": "sha512-5xfAypCQepv4Jog1U4zn8cZIcbKKFka3AgWHEFQeK65OW+Ys4XybP6z2kKgws4YB43KGpqp5D/K3go2UPPunLA==",
       "license": "MIT",
       "dependencies": {
-        "@vue/shared": "3.5.24"
+        "@vue/shared": "3.5.25"
       }
     },
     "node_modules/@vue/runtime-core": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.24.tgz",
-      "integrity": "sha512-RYP/byyKDgNIqfX/gNb2PB55dJmM97jc9wyF3jK7QUInYKypK2exmZMNwnjueWwGceEkP6NChd3D2ZVEp9undQ==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.25.tgz",
+      "integrity": "sha512-Z751v203YWwYzy460bzsYQISDfPjHTl+6Zzwo/a3CsAf+0ccEjQ8c+0CdX1WsumRTHeywvyUFtW6KvNukT/smA==",
       "license": "MIT",
       "dependencies": {
-        "@vue/reactivity": "3.5.24",
-        "@vue/shared": "3.5.24"
+        "@vue/reactivity": "3.5.25",
+        "@vue/shared": "3.5.25"
       }
     },
     "node_modules/@vue/runtime-dom": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.24.tgz",
-      "integrity": "sha512-Z8ANhr/i0XIluonHVjbUkjvn+CyrxbXRIxR7wn7+X7xlcb7dJsfITZbkVOeJZdP8VZwfrWRsWdShH6pngMxRjw==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.25.tgz",
+      "integrity": "sha512-a4WrkYFbb19i9pjkz38zJBg8wa/rboNERq3+hRRb0dHiJh13c+6kAbgqCPfMaJ2gg4weWD3APZswASOfmKwamA==",
       "license": "MIT",
       "dependencies": {
-        "@vue/reactivity": "3.5.24",
-        "@vue/runtime-core": "3.5.24",
-        "@vue/shared": "3.5.24",
+        "@vue/reactivity": "3.5.25",
+        "@vue/runtime-core": "3.5.25",
+        "@vue/shared": "3.5.25",
         "csstype": "^3.1.3"
       }
     },
     "node_modules/@vue/server-renderer": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.24.tgz",
-      "integrity": "sha512-Yh2j2Y4G/0/4z/xJ1Bad4mxaAk++C2v4kaa8oSYTMJBJ00/ndPuxCnWeot0/7/qafQFLh5pr6xeV6SdMcE/G1w==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.25.tgz",
+      "integrity": "sha512-UJaXR54vMG61i8XNIzTSf2Q7MOqZHpp8+x3XLGtE3+fL+nQd+k7O5+X3D/uWrnQXOdMw5VPih+Uremcw+u1woQ==",
       "license": "MIT",
       "dependencies": {
-        "@vue/compiler-ssr": "3.5.24",
-        "@vue/shared": "3.5.24"
+        "@vue/compiler-ssr": "3.5.25",
+        "@vue/shared": "3.5.25"
       },
       "peerDependencies": {
-        "vue": "3.5.24"
+        "vue": "3.5.25"
       }
     },
     "node_modules/@vue/shared": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.24.tgz",
-      "integrity": "sha512-9cwHL2EsJBdi8NY22pngYYWzkTDhld6fAD6jlaeloNGciNSJL6bLpbxVgXl96X00Jtc6YWQv96YA/0sxex/k1A==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.25.tgz",
+      "integrity": "sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==",
       "license": "MIT"
     },
     "node_modules/@vue/tsconfig": {
@@ -1814,13 +1816,15 @@
       "version": "1.43.4",
       "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.43.4.tgz",
       "integrity": "sha512-8hAxVfo2ImICd69BWlZwZlxe9rxDGDjuUhh+WeWgGDvfBCE+r3lkynkQvIovDz4jcMi8O7bsEaFygaDT+h9sBA==",
-      "license": "BSD-3-Clause"
+      "license": "BSD-3-Clause",
+      "peer": true
     },
     "node_modules/acorn": {
       "version": "8.15.0",
       "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
       "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
       "dev": true,
+      "peer": true,
       "bin": {
         "acorn": "bin/acorn"
       },
@@ -1855,9 +1859,9 @@
       }
     },
     "node_modules/alien-signals": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.1.0.tgz",
-      "integrity": "sha512-yufC6VpSy8tK3I0lO67pjumo5JvDQVQyr38+3OHqe6CHl1t2VZekKZ7EKKZSqk0cRmE7U7tfZbpXiKNzuc+ckg==",
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.1.1.tgz",
+      "integrity": "sha512-ogkIWbVrLwKtHY6oOAXaYkAxP+cTH7V5FZ5+Tm4NZFd8VDZ6uNMDrfzqctTZ42eTMCSR3ne3otpcxmqSnFfPYA==",
       "dev": true,
       "license": "MIT"
     },
@@ -2053,6 +2057,7 @@
       "version": "7.2.0",
       "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
       "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
+      "license": "MIT",
       "engines": {
         "node": ">= 10"
       }
     },
@@ -2083,6 +2088,7 @@
       "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
       "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
       "dev": true,
+      "license": "MIT",
       "bin": {
         "cssesc": "bin/cssesc"
       },
@@ -2091,15 +2097,16 @@
       }
     },
     "node_modules/csstype": {
-      "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
-      "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
+      "version": "3.2.3",
+      "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
+      "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
       "license": "MIT"
     },
     "node_modules/d3": {
       "version": "7.9.0",
       "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz",
       "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==",
+      "license": "ISC",
       "dependencies": {
         "d3-array": "3",
         "d3-axis": "3",
@@ -2140,6 +2147,7 @@
       "version": "3.2.4",
       "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz",
       "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==",
+      "license": "ISC",
       "dependencies": {
         "internmap": "1 - 2"
      },
@@ -2151,6 +2159,7 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz",
       "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2159,6 +2168,7 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz",
       "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==",
+      "license": "ISC",
       "dependencies": {
         "d3-dispatch": "1 - 3",
         "d3-drag": "2 - 3",
@@ -2174,6 +2184,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz",
       "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==",
+      "license": "ISC",
       "dependencies": {
         "d3-path": "1 - 3"
       },
@@ -2185,6 +2196,7 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz",
       "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2193,6 +2205,7 @@
       "version": "4.0.2",
       "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz",
       "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==",
+      "license": "ISC",
       "dependencies": {
         "d3-array": "^3.2.0"
       },
@@ -2204,6 +2217,7 @@
       "version": "6.0.4",
       "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz",
       "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==",
+      "license": "ISC",
       "dependencies": {
         "delaunator": "5"
       },
@@ -2215,6 +2229,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz",
       "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2223,6 +2238,7 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz",
       "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==",
+      "license": "ISC",
       "dependencies": {
         "d3-dispatch": "1 - 3",
         "d3-selection": "3"
       },
@@ -2235,6 +2251,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz",
       "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==",
+      "license": "ISC",
       "dependencies": {
         "commander": "7",
         "iconv-lite": "0.6",
@@ -2259,6 +2276,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz",
       "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==",
+      "license": "BSD-3-Clause",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2267,6 +2285,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz",
       "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==",
+      "license": "ISC",
       "dependencies": {
         "d3-dsv": "1 - 3"
       },
@@ -2278,6 +2297,7 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz",
       "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==",
+      "license": "ISC",
       "dependencies": {
         "d3-dispatch": "1 - 3",
         "d3-quadtree": "1 - 3",
@@ -2291,6 +2311,7 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz",
       "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2299,6 +2320,7 @@
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz",
       "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==",
+      "license": "ISC",
       "dependencies": {
         "d3-array": "2.5.0 - 3"
       },
@@ -2310,6 +2332,7 @@
       "version": "3.1.2",
       "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz",
       "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2318,6 +2341,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz",
       "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==",
+      "license": "ISC",
       "dependencies": {
         "d3-color": "1 - 3"
       },
@@ -2329,6 +2353,7 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz",
       "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2337,6 +2362,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz",
       "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2345,6 +2371,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz",
       "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2353,6 +2380,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz",
       "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2361,6 +2389,7 @@
       "version": "4.0.2",
       "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz",
       "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==",
+      "license": "ISC",
       "dependencies": {
         "d3-array": "2.10.0 - 3",
         "d3-format": "1 - 3",
@@ -2376,6 +2405,7 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz",
       "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==",
+      "license": "ISC",
       "dependencies": {
         "d3-color": "1 - 3",
         "d3-interpolate": "1 - 3"
       },
@@ -2388,6 +2418,8 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
       "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
+      "license": "ISC",
+      "peer": true,
       "engines": {
         "node": ">=12"
       }
     },
@@ -2396,6 +2428,7 @@
       "version": "3.2.0",
       "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
       "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
+      "license": "ISC",
       "dependencies": {
         "d3-path": "^3.1.0"
       },
@@ -2407,6 +2440,7 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
       "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
+      "license": "ISC",
       "dependencies": {
         "d3-array": "2 - 3"
       },
@@ -2418,6 +2452,7 @@
       "version": "4.1.0",
       "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz",
       "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==",
+      "license": "ISC",
       "dependencies": {
         "d3-time": "1 - 3"
       },
@@ -2429,6 +2464,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz",
       "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -2437,6 +2473,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz",
       "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==",
+      "license": "ISC",
       "dependencies": {
         "d3-color": "1 - 3",
         "d3-dispatch": "1 - 3",
@@ -2455,6 +2492,7 @@
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz",
       "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==",
+      "license": "ISC",
       "dependencies": {
         "d3-dispatch": "1 - 3",
         "d3-drag": "2 - 3",
@@ -2558,6 +2596,7 @@
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz",
       "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==",
+      "license": "ISC",
       "dependencies": {
         "robust-predicates": "^3.0.2"
       }
     },
@@ -2764,6 +2803,7 @@
       "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.8.0",
         "@eslint-community/regexpp": "^4.12.1",
@@ -2824,6 +2864,7 @@
       "integrity": "sha512-lZBts941cyJyeaooiKxAtzoPHTN+GbQTJFAIdQbRhA4/8whaAraEh47Whw/ZFfrjNSnlAxqfm9i0XVAEkULjCw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "bin": {
         "eslint-config-prettier": "build/bin/cli.js"
       },
@@ -2863,16 +2904,17 @@
       }
     },
     "node_modules/eslint-plugin-vue": {
-      "version": "10.5.1",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-10.5.1.tgz",
-      "integrity": "sha512-SbR9ZBUFKgvWAbq3RrdCtWaW0IKm6wwUiApxf3BVTNfqUIo4IQQmreMg2iHFJJ6C/0wss3LXURBJ1OwS/MhFcQ==",
+      "version": "10.6.2",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-10.6.2.tgz",
+      "integrity": "sha512-nA5yUs/B1KmKzvC42fyD0+l9Yd+LtEpVhWRbXuDj0e+ZURcTtyRbMDWUeJmTAh2wC6jC83raS63anNM2YT3NPw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
         "natural-compare": "^1.4.0",
         "nth-check": "^2.1.1",
-        "postcss-selector-parser": "^6.0.15",
+        "postcss-selector-parser": "^7.1.0",
         "semver": "^7.6.3",
         "xml-name-validator": "^4.0.0"
       },
@@ -3467,6 +3509,7 @@
       "version": "0.6.3",
       "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
       "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
+      "license": "MIT",
       "dependencies": {
         "safer-buffer": ">= 2.1.2 < 3.0.0"
       },
@@ -3528,6 +3571,7 @@
       "version": "2.0.3",
       "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz",
       "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==",
+      "license": "ISC",
       "engines": {
         "node": ">=12"
       }
     },
@@ -3915,6 +3959,7 @@
       "version": "12.3.2",
       "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz",
       "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==",
+      "peer": true,
       "dependencies": {
         "argparse": "^2.0.1",
         "entities": "~2.1.0",
@@ -4505,13 +4550,13 @@
       }
     },
     "node_modules/playwright": {
-      "version": "1.56.1",
-      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
-      "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
+      "version": "1.57.0",
+      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.57.0.tgz",
+      "integrity": "sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "playwright-core": "1.56.1"
+        "playwright-core": "1.57.0"
       },
       "bin": {
         "playwright": "cli.js"
       },
@@ -4524,9 +4569,9 @@
       }
     },
     "node_modules/playwright-core": {
-      "version": "1.56.1",
-      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
-      "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
+      "version": "1.57.0",
+      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.57.0.tgz",
+      "integrity": "sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -4565,10 +4610,11 @@
       }
     },
     "node_modules/postcss-selector-parser": {
-      "version": "6.0.15",
-      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz",
-      "integrity": "sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz",
+      "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "cssesc": "^3.0.0",
         "util-deprecate": "^1.0.2"
@@ -4604,6 +4650,7 @@
       "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "bin": {
         "prettier": "bin/prettier.cjs"
       },
@@ -4770,7 +4817,8 @@
     "node_modules/robust-predicates": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz",
-      "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="
+      "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==",
+      "license": "Unlicense"
     },
     "node_modules/rollup": {
       "version": "4.46.2",
@@ -4837,7 +4885,8 @@
     "node_modules/rw": {
       "version": "1.3.3",
       "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz",
-      "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ=="
+      "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==",
+      "license": "BSD-3-Clause"
     },
     "node_modules/safe-regex-test": {
       "version": "1.0.0",
@@ -4856,7 +4905,8 @@
     "node_modules/safer-buffer": {
       "version": "2.1.2",
       "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
-      "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+      "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+      "license": "MIT"
     },
     "node_modules/sax": {
       "version": "1.4.1",
@@ -5186,6 +5236,7 @@
       "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=12"
       },
@@ -5255,6 +5306,7 @@
       "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
       "devOptional": true,
       "license": "Apache-2.0",
+      "peer": true,
       "bin": {
         "tsc": "bin/tsc",
         "tsserver": "bin/tsserver"
       },
@@ -5316,7 +5368,8 @@
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
       "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
-      "dev": true
+      "dev": true,
+      "license": "MIT"
     },
     "node_modules/validate-npm-package-license": {
       "version": "3.0.4",
@@ -5334,6 +5387,7 @@
       "integrity": "sha512-NL8jTlbo0Tn4dUEXEsUg8KeyG/Lkmc4Fnzb8JXN/Ykm9G4HNImjtABMJgkQoVjOBN/j2WAwDTRytdqJbZsah7w==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "esbuild": "^0.25.0",
         "fdir": "^6.5.0",
@@ -5441,6 +5495,7 @@
       "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=12"
       },
@@ -5456,16 +5511,17 @@
       "license": "MIT"
     },
     "node_modules/vue": {
-      "version": "3.5.24",
-      "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.24.tgz",
-      "integrity": "sha512-uTHDOpVQTMjcGgrqFPSb8iO2m1DUvo+WbGqoXQz8Y1CeBYQ0FXf2z1gLRaBtHjlRz7zZUBHxjVB5VTLzYkvftg==",
+      "version": "3.5.25",
+      "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.25.tgz",
+      "integrity": "sha512-YLVdgv2K13WJ6n+kD5owehKtEXwdwXuj2TTyJMsO7pSeKw2bfRNZGjhB7YzrpbMYj5b5QsUebHpOqR3R3ziy/g==",
       "license": "MIT",
+      "peer": true,
       "dependencies": {
-        "@vue/compiler-dom": "3.5.24",
-        "@vue/compiler-sfc": "3.5.24",
-        "@vue/runtime-dom": "3.5.24",
-        "@vue/server-renderer": "3.5.24",
-        "@vue/shared": "3.5.24"
+        "@vue/compiler-dom": "3.5.25",
+        "@vue/compiler-sfc": "3.5.25",
+        "@vue/runtime-dom": "3.5.25",
+        "@vue/server-renderer": "3.5.25",
+        "@vue/shared": "3.5.25"
       },
       "peerDependencies": {
         "typescript": "*"
@@ -5528,14 +5584,14 @@
       }
     },
     "node_modules/vue-tsc": {
-      "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.1.4.tgz",
-      "integrity": "sha512-GsRJxttj4WkmXW/zDwYPGMJAN3np/4jTzoDFQTpTsI5Vg/JKMWamBwamlmLihgSVHO66y9P7GX+uoliYxeI4Hw==",
+      "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.1.5.tgz",
+      "integrity": "sha512-L/G9IUjOWhBU0yun89rv8fKqmKC+T0HfhrFjlIml71WpfBv9eb4E9Bev8FMbyueBIU9vxQqbd+oOsVcDa5amGw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@volar/typescript": "2.4.23",
-        "@vue/language-core": "3.1.4"
+        "@vue/language-core": "3.1.5"
       },
       "bin": {
         "vue-tsc": "bin/vue-tsc.js"
diff --git a/webui/package.json b/webui/package.json
index 875d998c9..6c4de06d4 100644
--- a/webui/package.json
+++ b/webui/package.json
@@ -17,7 +17,7 @@
   },
   "dependencies": {
     "@popperjs/core": "^2.11.6",
-    "@ssthouse/vue3-tree-chart": "^0.2.6",
+    "@ssthouse/vue3-tree-chart": "^0.3.0",
     "@types/bootstrap": "^5.2.10",
     "@types/whatwg-mimetype": "^3.0.2",
     "ace-builds": "^1.43.4",
@@ -29,7 +29,7 @@
     "http-status-codes": "^2.3.0",
     "js-yaml": "^4.1.1",
     "ncp": "^2.0.0",
-    "vue": "^3.5.24",
+    "vue": "^3.5.25",
     "vue-router": "^4.6.3",
     "vue3-ace-editor": "^2.2.4",
     "vue3-highlightjs": "^1.0.5",
@@ -38,7 +38,7 @@
     "xml-formatter": "^3.6.7"
   },
   "devDependencies": {
-    "@playwright/test": "^1.56.1",
+    "@playwright/test": "^1.57.0",
     "@rushstack/eslint-patch": "^1.15.0",
     "@types/js-yaml": "^4.0.9",
     "@types/node": "^24.10.1",
@@ -47,12 +47,12 @@
     "@vue/eslint-config-typescript": "^14.6.0",
     "@vue/tsconfig": "^0.8.1",
     "eslint": "^9.39.1",
-    "eslint-plugin-vue": "^10.5.1",
+    "eslint-plugin-vue": "^10.6.2",
     "npm-run-all": "^4.1.5",
     "prettier": "^3.6.2",
     "typescript": "~5.9.3",
     "vite": "^7.2.4",
-    "vue-tsc": "^3.1.4",
+    "vue-tsc": "^3.1.5",
     "xml2js": "^0.6.2"
   }
 }
diff --git a/webui/src/components/dashboard/ServiceInfoCard.vue b/webui/src/components/dashboard/ServiceInfoCard.vue
index 8afb836db..551ef639b 100644
--- a/webui/src/components/dashboard/ServiceInfoCard.vue
+++ b/webui/src/components/dashboard/ServiceInfoCard.vue
@@ -1,5 +1,5 @@