Skip to content

Commit 78572f9

Browse files
authored
Merge pull request #657 from marle3003/develop
Develop
2 parents da60cd9 + 12633c0 commit 78572f9

File tree

17 files changed

+715
-293
lines changed

17 files changed

+715
-293
lines changed

config/dynamic/json.go

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,12 @@ import (
44
"bytes"
55
"encoding/json"
66
"fmt"
7-
"github.com/pkg/errors"
87
"io"
98
"reflect"
109
"strings"
1110
"unicode"
11+
12+
"github.com/pkg/errors"
1213
)
1314

1415
type decoder struct {
@@ -91,11 +92,13 @@ func value(token json.Token, d *decoder, v reflect.Value) error {
9192
return number(t, v)
9293
case bool:
9394
_, v = indirect(v)
94-
if v.Type().AssignableTo(reflect.TypeOf(t)) {
95+
vt := reflect.ValueOf(t)
96+
if vt.Type().AssignableTo(v.Type()) {
9597
v.Set(reflect.ValueOf(t))
9698
} else {
97-
return fmt.Errorf("bool is not assignable to string")
99+
return fmt.Errorf("bool is not assignable to %s", toTypeName(v))
98100
}
101+
99102
return nil
100103
case nil:
101104
switch v.Kind() {

config/dynamic/json_test.go

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
package dynamic
22

33
import (
4-
"github.com/stretchr/testify/require"
54
"testing"
5+
6+
"github.com/stretchr/testify/require"
67
)
78

89
func TestSchema_UnmarshalJSON(t *testing.T) {
@@ -72,6 +73,23 @@ func TestSchema_UnmarshalJSON(t *testing.T) {
7273
require.Equal(t, true, b)
7374
},
7475
},
76+
{
77+
name: "bool to interface",
78+
test: func(t *testing.T) {
79+
var v any
80+
err := UnmarshalJSON([]byte(`true`), &v)
81+
require.NoError(t, err)
82+
require.Equal(t, true, v)
83+
},
84+
},
85+
{
86+
name: "bool to string",
87+
test: func(t *testing.T) {
88+
var s string
89+
err := UnmarshalJSON([]byte(`true`), &s)
90+
require.EqualError(t, err, "bool is not assignable to string")
91+
},
92+
},
7593
{
7694
name: "array",
7795
test: func(t *testing.T) {

docs/config.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -206,7 +206,8 @@
206206
"Bring Your Mock APIs to Life with Mokapi and JavaScript": "resources/blogs/dynamic-mocks-with-javascript.md",
207207
"Debugging Mokapi Scripts": "resources/blogs/debugging-mokapi-scripts.md",
208208
"Acceptance Testing with Mokapi: Focus on What Matters": "resources/blogs/acceptance-testing.md",
209-
"Testing Email Workflows with Playwright and Mokapi": "resources/blogs/testing-email-workflows-playwright.md"
209+
"Testing Email Workflows with Playwright and Mokapi": "resources/blogs/testing-email-workflows-playwright.md",
210+
"Testing Kafka Workflows with Playwright and Mokapi": "resources/blogs/testing-kafka-workflows-playwright.md"
210211
}
211212
}
212213
}
Lines changed: 252 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,252 @@
1+
---
2+
title: Testing Kafka Workflows with Playwright and Mokapi
3+
description: Simulating real message flows end-to-end with Node.js, Kafka topics, and browser-driven tests.
4+
icon: bi-lightning
5+
tech: kafka
6+
---
7+
8+
# Testing Kafka Workflows with Playwright and Mokapi
9+
10+
## Introduction
11+
12+
Modern applications rarely work in isolation. Instead, they communicate through **events** on Kafka topics, making
13+
testing a challenge. How do you know your backend consumes the right command messages, processes them correctly,
14+
and publishes the expected events?
15+
16+
This is where Mokapi comes in. Mokapi lets you **mock Kafka brokers** using
17+
[AsyncAPI](https://www.asyncapi.com/) specifications or dynamic configuration. You can define your topics and schemas,
18+
and then interact with them over a simple REST API. That means you don’t need to spin up a real Kafka cluster just
19+
for testing.
20+
21+
For end-to-end testing, we combine Mokapi with [Playwright](https://playwright.dev). Normally Playwright is used for
22+
browser UI tests, but in this tutorial we’ll use it to drive our workflow tests: producing Kafka messages, waiting for
23+
backend processing, and validating the resulting events. (If you’re wondering: Jest would also work here, but
24+
Playwright makes it easy to integrate with workflows that already involve UI.)
25+
26+
By the end, you’ll see how a full event-driven workflow can be tested reliably without the overhead of managing a real
27+
Kafka cluster.
28+
29+
---
30+
31+
## The Testing Scenario
32+
33+
Imagine this workflow:
34+
35+
- A foreign system publishes a **command** to the topic `document.send-command`.
36+
- Our **backend service** (written in Node.js) consumes this command, simulates sending the document, and then publishes a **resulting event** to `document.send-event`.
37+
- Our **test** (written in Playwright) acts as the foreign system:
38+
1. It produces a command message to `document.send-command` using Mokapi’s REST API.
39+
2. It waits for the backend to process the command.
40+
3. It retrieves the resulting event from `document.send-event` and verifies the contents (such as `documentId` and `status`).
41+
42+
<img src="/kafka-workflow.png" alt="Sequence diagram of Kafka workflow testing with Mokapi, Playwright, and a backend service" title="Kafka workflow testing with Mokapi and Playwright">
43+
44+
This mirrors what happens in production: a message-driven workflow across multiple services. The only difference is
45+
that for testing, Kafka itself is mocked by Mokapi.
46+
47+
## Defining Kafka Topics with AsyncAPI
48+
49+
The heart of this setup is the AsyncAPI specification. It declares the two topics we use:
50+
- `document.send-command` (input command)
51+
- `document.send-event` (output event)
52+
53+
Here is part of the API specification:
54+
55+
```yaml
56+
asyncapi: 3.0.0
57+
info:
58+
title: Kafka Mock
59+
version: 1.0.0
60+
servers:
61+
mock:
62+
host: localhost:9092
63+
protocol: kafka
64+
65+
channels:
66+
document.send-command:
67+
description: Sending documents to customers
68+
messages:
69+
documentCommand:
70+
$ref: '#/components/messages/documentCommand'
71+
document.send-event:
72+
description: Events when sending documents
73+
messages:
74+
documentEvent:
75+
$ref: '#/components/messages/documentEvent'
76+
```
77+
78+
## Setting Up the Backend
79+
80+
Our backend is a small Node.js service. It consumes commands from `document.send-command`, simulates sending a document,
81+
and publishes an event to `document.send-event`.
82+
83+
```javascript
84+
import { Kafka } from 'kafkajs';
85+
86+
const kafka = new Kafka({
87+
clientId: 'backend',
88+
brokers: ['localhost:9092']
89+
});
90+
91+
const consumer = kafka.consumer({ groupId: 'backend-group' });
92+
const producer = kafka.producer();
93+
94+
async function start() {
95+
await consumer.connect();
96+
await producer.connect();
97+
98+
await consumer.subscribe({ topic: 'document.send-command', fromBeginning: true });
99+
100+
await consumer.run({
101+
eachMessage: async ({ topic, partition, message }) => {
102+
const value = JSON.parse(message.value.toString());
103+
console.log('Received command:', value);
104+
105+
// Simulate sending the document
106+
await new Promise(res => setTimeout(res, 500));
107+
108+
// Publish send-event
109+
const event = {
110+
documentId: value.documentId,
111+
status: 'SENT'
112+
};
113+
await producer.send({
114+
topic: 'document.send-event',
115+
messages: [{ key: value.documentId, value: JSON.stringify(event) }]
116+
});
117+
118+
console.log('Published event:', event);
119+
}
120+
});
121+
}
122+
123+
start().catch(console.error);
124+
```
125+
126+
---
127+
128+
## Writing the Test with Playwright
129+
130+
Now let’s test this workflow with Playwright and Mokapi.
131+
132+
1. Produce a command to `document.send-command`.
133+
2. Wait for the backend to publish an event to `document.send-event`.
134+
3. Retrieve the event and assert its contents.
135+
136+
We can access Mokapi's REST API either through Playwright's `Page` object or via `fetch`.
137+
138+
```typescript
139+
import { test, expect } from '@playwright/test';
140+
import fetch from 'node-fetch';
141+
142+
const MOKAPI_API = 'http://localhost:8080/api/services/kafka/Kafka%20Mock';
143+
const TOPIC_COMMAND = 'document.send-command';
144+
const TOPIC_EVENT = 'document.send-event';
145+
146+
test('Kafka document send workflow', async () => {
147+
const documentId = 'doc-' + Date.now();
148+
let startOffset = -1
149+
console.log('using document ID: ' + documentId)
150+
151+
await test.step('Get current offset for events', async () => {
152+
startOffset = await getPartitionOffset(TOPIC_EVENT, 0)
153+
console.log('current partition offset is: ' + startOffset)
154+
})
155+
156+
await test.step('Produce a message to document.send-command topic', async () => {
157+
await produce(TOPIC_COMMAND, {
158+
key: documentId,
159+
value: {
160+
documentId: documentId,
161+
recipient: '[email protected]',
162+
document: {
163+
mediaType: 'text/plain',
164+
fileName: 'test.txt',
165+
content: 'Hello Alice'
166+
}
167+
}
168+
})
169+
})
170+
171+
await test.step('Get messages from document.send-event', async () => {
172+
let record: any = undefined;
173+
const timeout = Date.now() + 5000;
174+
175+
while (Date.now() < timeout && !record) {
176+
const records: any = await read(TOPIC_EVENT, 0, startOffset);
177+
console.log(records)
178+
record = records.find(x => x.value.documentId === documentId);
179+
if (record) {
180+
break
181+
}
182+
startOffset += records.length
183+
// short delay before retry
184+
await new Promise(res => setTimeout(res, 200));
185+
}
186+
expect(record, 'record should be found').not.toBeNull();
187+
expect(record.value.status).toBe('SENT')
188+
})
189+
})
190+
191+
async function getPartitionOffset(topic, partition) {
192+
const res = await fetch(`${MOKAPI_API}/topics/${topic}/partitions/${partition}`);
193+
const data: any = await res.json();
194+
return data.offset
195+
}
196+
197+
async function produce(topic: string, record: {key: string, value: any}) {
198+
const res = await fetch(`${MOKAPI_API}/topics/${topic}`, {
199+
method: 'POST',
200+
headers: { 'Content-Type': 'application/json' },
201+
body: JSON.stringify({
202+
records: [
203+
{
204+
key: record.key,
205+
value: record.value
206+
}
207+
]
208+
})
209+
});
210+
expect(res.status).toBe(200);
211+
const data: any = await res.json();
212+
expect(data.offsets.every(x => !('error' in x))).toBe(true);
213+
}
214+
215+
async function read(topic: string, partition: number, offset: number) {
216+
const res = await fetch(`${MOKAPI_API}/topics/${topic}/partitions/${partition}/offsets?offset=${offset}`)
217+
expect(res.status).toBe(200);
218+
return await res.json()
219+
}
220+
```
221+
222+
This test simulates the entire flow end-to-end — no mocks inside the backend, no shortcuts. Just a command going in, and an event coming out.
223+
224+
## Why This Matters
225+
226+
This example might feel simple, but the pattern is powerful:
227+
228+
- Spec-first: Mokapi uses AsyncAPI to define topics and message schemas. Tests validate against this shared contract.
229+
- Realistic simulation: you’re testing exactly how your backend would behave in production.
230+
- Fast feedback: no need to set up and maintain a real Kafka cluster in your CI pipeline.
231+
- Scalability: add more backends and topics, and the test structure remains the same.
232+
233+
With Mokapi and Playwright, you get a full end-to-end test for an event-driven workflow without the heavy setup.
234+
235+
---
236+
237+
## Conclusion
238+
239+
We’ve built a full workflow test where:
240+
241+
- A command is produced to Kafka.
242+
- A backend consumes it and publishes an event.
243+
- A test verifies the event using Mokapi’s REST API.
244+
245+
This pattern can be applied to much more complex event-driven architectures: multiple services, multiple topics,
246+
or more complex event payloads. However, the testing approach remains the same: Produce → Consume → Verify.
247+
248+
By mocking Kafka with Mokapi, you avoid the overhead of running real brokers in tests, while still validating real
249+
message flows. Combined with Playwright, this gives you a powerful way to ensure your event-driven applications
250+
work exactly as expected.
251+
252+
👉 You can find the full working example in the repository: [mokapi-kafka-workflow](https://github.com/marle3003/mokapi-kafka-workflow).

providers/openapi/config.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,14 @@ import (
44
_ "embed"
55
"errors"
66
"fmt"
7-
"gopkg.in/yaml.v3"
87
"mokapi/config/dynamic"
98
"mokapi/media"
109
jsonSchema "mokapi/schema/json/schema"
1110
"mokapi/version"
1211
"net/http"
1312
"strings"
13+
14+
"gopkg.in/yaml.v3"
1415
)
1516

1617
var (

providers/openapi/error.go

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
package openapi
22

3-
import "fmt"
3+
import (
4+
"fmt"
5+
"net/http"
6+
)
47

58
type httpError struct {
69
StatusCode int
10+
Header http.Header
711
message string
812
}
913

@@ -24,3 +28,13 @@ func newHttpErrorf(status int, format string, args ...interface{}) *httpError {
2428
message: fmt.Sprintf(format, args...),
2529
}
2630
}
31+
32+
func newMethodNotAllowedErrorf(status int, methods []string, format string, args ...interface{}) *httpError {
33+
return &httpError{
34+
StatusCode: status,
35+
Header: http.Header{
36+
"Allow": methods,
37+
},
38+
message: fmt.Sprintf(format, args...),
39+
}
40+
}

0 commit comments

Comments
 (0)