|
16 | 16 | */ |
17 | 17 | public class GenerateAvroSamples { |
18 | 18 |
|
    /** Prevents instantiation; this class only exposes static helpers. */
    private GenerateAvroSamples() {
        // Utility class
    }

19 | 23 | public static void main(String[] args) throws IOException { |
20 | 24 | // Create three different products |
21 | 25 | AvroProduct product1 = new AvroProduct(1001, "Laptop", 999.99); |
22 | 26 | AvroProduct product2 = new AvroProduct(1002, "Smartphone", 599.99); |
23 | 27 | AvroProduct product3 = new AvroProduct(1003, "Headphones", 149.99); |
24 | | - |
| 28 | + |
25 | 29 | // Serialize and encode each product |
26 | 30 | String encodedProduct1 = serializeAndEncode(product1); |
27 | 31 | String encodedProduct2 = serializeAndEncode(product2); |
28 | 32 | String encodedProduct3 = serializeAndEncode(product3); |
29 | | - |
| 33 | + |
30 | 34 | // Serialize and encode an integer key |
31 | 35 | String encodedKey = serializeAndEncodeInteger(42); |
32 | | - |
| 36 | + |
33 | 37 | // Print the results |
34 | 38 | System.out.println("Base64 encoded Avro products for use in kafka-avro-event.json:"); |
35 | 39 | System.out.println("\nProduct 1 (with key):"); |
36 | 40 | System.out.println("key: \"" + encodedKey + "\","); |
37 | 41 | System.out.println("value: \"" + encodedProduct1 + "\","); |
38 | | - |
| 42 | + |
39 | 43 | System.out.println("\nProduct 2 (with key):"); |
40 | 44 | System.out.println("key: \"" + encodedKey + "\","); |
41 | 45 | System.out.println("value: \"" + encodedProduct2 + "\","); |
42 | | - |
| 46 | + |
43 | 47 | System.out.println("\nProduct 3 (without key):"); |
44 | 48 | System.out.println("key: null,"); |
45 | 49 | System.out.println("value: \"" + encodedProduct3 + "\","); |
46 | | - |
| 50 | + |
47 | 51 | // Print a sample event structure |
48 | 52 | System.out.println("\nSample event structure:"); |
49 | 53 | printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3); |
50 | 54 | } |
51 | | - |
| 55 | + |
52 | 56 | private static String serializeAndEncode(AvroProduct product) throws IOException { |
53 | 57 | ByteArrayOutputStream baos = new ByteArrayOutputStream(); |
54 | 58 | BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null); |
55 | 59 | DatumWriter<AvroProduct> writer = new SpecificDatumWriter<>(AvroProduct.class); |
56 | | - |
| 60 | + |
57 | 61 | writer.write(product, encoder); |
58 | 62 | encoder.flush(); |
59 | | - |
| 63 | + |
60 | 64 | return Base64.getEncoder().encodeToString(baos.toByteArray()); |
61 | 65 | } |
62 | | - |
| 66 | + |
63 | 67 | private static String serializeAndEncodeInteger(Integer value) throws IOException { |
64 | 68 | // For simple types like integers, we'll just convert to string and encode |
65 | 69 | return Base64.getEncoder().encodeToString(value.toString().getBytes()); |
66 | 70 | } |
67 | | - |
| 71 | + |
68 | 72 | private static void printSampleEvent(String key, String product1, String product2, String product3) { |
69 | 73 | System.out.println("{\n" + |
70 | 74 | " \"eventSource\": \"aws:kafka\",\n" + |
71 | | - " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" + |
72 | | - " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" + |
| 75 | + " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" |
| 76 | + + |
| 77 | + " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" |
| 78 | + + |
73 | 79 | " \"records\": {\n" + |
74 | 80 | " \"mytopic-0\": [\n" + |
75 | 81 | " {\n" + |
|
0 commit comments