Here's a comprehensive event schema for Kafka messages in an order-management trading system, including hash-based integrity verification performed by message producers and checked by consumers:
{
"eventType": "ORDER_CREATED",
"version": "1.0",
"timestamp": "2025-03-10T08:45:32.215Z",
"orderId": "ORD-12345678",
"producerId": "TRADING-SVC-01",
"payload": {
"clientId": "CLI-98765",
"symbol": "AAPL",
"quantity": 100,
"price": 225.75,
"side": "BUY",
"orderType": "LIMIT",
"timeInForce": "GTC",
"status": "PENDING"
},
"metadata": {
"correlationId": "a1b2c3d4-e5f6-7890-1234-567890abcdef",
"origin": "WEB_PLATFORM"
},
"hash": "63a9f0ea7bb98050796b649e85481845"
}
For an order management system, you might use the following topic structure:
trading.orders.created
trading.orders.updated
trading.orders.executed
trading.orders.canceled
trading.orders.rejected
Here's how to implement the hash verification in Java:
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.security.MessageDigest;
import java.util.Properties;
public class OrderEventProducer {
    private final KafkaProducer<String, String> producer;
    private final ObjectMapper objectMapper;
    private final String producerId;

    /**
     * Publishes order events to Kafka as JSON strings, stamping each event
     * with this producer's id and an MD5 integrity checksum.
     *
     * @param bootstrapServers Kafka bootstrap server list, e.g. "host:9092"
     * @param producerId       logical id stamped onto every outgoing event
     */
    public OrderEventProducer(String bootstrapServers, String producerId) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // Require full ISR acknowledgement so order events survive broker failover.
        props.put("acks", "all");
        this.producer = new KafkaProducer<>(props);
        this.objectMapper = new ObjectMapper();
        this.producerId = producerId;
    }

    /**
     * Stamps, hashes, serializes and sends a single order event.
     *
     * The hash is computed over the JSON form of the event WITHOUT its hash
     * field; consumers repeat the same procedure to verify. The record is
     * keyed by orderId so all events for one order land on the same partition
     * and preserve ordering.
     *
     * @param topic target Kafka topic (e.g. "trading.orders.created")
     * @param event event to publish; its producerId and hash fields are set here
     * @throws Exception on JSON serialization or digest failure
     *                   (kept broad for signature compatibility with callers)
     */
    public void sendOrderEvent(String topic, OrderEvent event) throws Exception {
        event.setProducerId(this.producerId);

        // Hash a copy with the hash field cleared so producer and consumer
        // digest byte-identical JSON.
        OrderEvent eventForHashing = event.copyWithoutHash();
        String eventJson = objectMapper.writeValueAsString(eventForHashing);
        event.setHash(calculateMD5Hash(eventJson));

        String completeEventJson = objectMapper.writeValueAsString(event);
        ProducerRecord<String, String> record = new ProducerRecord<>(
                topic, event.getOrderId(), completeEventJson);
        // Report asynchronous delivery failures; the original fire-and-forget
        // send silently discarded broker-side errors.
        producer.send(record, (metadata, exception) -> {
            if (exception != null) {
                System.err.println("Failed to publish event " + event.getOrderId()
                        + " to topic " + topic + ": " + exception);
            }
        });
    }

    /**
     * Hex-encoded MD5 digest of the UTF-8 bytes of {@code input}.
     *
     * NOTE(review): MD5 here is an integrity checksum against accidental
     * corruption only — it does NOT protect against deliberate tampering,
     * because anyone can recompute it. Use an HMAC (shared secret) or a
     * digital signature for authenticity. MD5 is retained for wire
     * compatibility with existing consumers.
     */
    private String calculateMD5Hash(String input) throws Exception {
        MessageDigest md = MessageDigest.getInstance("MD5");
        // StandardCharsets avoids the checked UnsupportedEncodingException path
        // of the string-based charset lookup.
        byte[] hashBytes = md.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        StringBuilder hexString = new StringBuilder(hashBytes.length * 2);
        for (byte hashByte : hashBytes) {
            String hex = Integer.toHexString(0xff & hashByte);
            if (hex.length() == 1) {
                hexString.append('0');
            }
            hexString.append(hex);
        }
        return hexString.toString();
    }

    /** Flushes any buffered records and releases producer resources. */
    public void close() {
        producer.close();
    }
}
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.security.MessageDigest;
import java.util.Collections;
import java.util.Properties;
public class OrderEventConsumer {
    private final KafkaConsumer<String, String> consumer;
    private final ObjectMapper objectMapper;

    /**
     * Consumes order events from Kafka and verifies each event's MD5
     * integrity hash before processing it.
     *
     * @param bootstrapServers Kafka bootstrap server list
     * @param groupId          consumer group id for offset tracking
     */
    public OrderEventConsumer(String bootstrapServers, String groupId) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", groupId);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        this.consumer = new KafkaConsumer<>(props);
        this.objectMapper = new ObjectMapper();
    }

    /** Subscribes this consumer to a single topic. */
    public void subscribe(String topic) {
        consumer.subscribe(Collections.singletonList(topic));
    }

    /**
     * Polls forever, verifying each event's hash and dispatching it.
     * An event whose recomputed hash does not match (or whose hash field is
     * absent) is routed to {@link #handleInvalidHash}; malformed messages are
     * logged and skipped so one bad record cannot stall the partition.
     */
    public void processEvents() {
        while (true) {
            var records = consumer.poll(java.time.Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                try {
                    OrderEvent event = objectMapper.readValue(record.value(), OrderEvent.class);
                    String receivedHash = event.getHash();
                    if (receivedHash == null) {
                        // A missing hash previously surfaced as a NullPointerException
                        // swallowed by the generic catch below and misreported as a
                        // deserialization error; treat it as a verification failure.
                        handleInvalidHash(event, null, null);
                        continue;
                    }
                    // Recompute the hash over the event with its hash field cleared,
                    // mirroring the producer's procedure exactly.
                    OrderEvent eventForVerification = event.copyWithoutHash();
                    String eventJson = objectMapper.writeValueAsString(eventForVerification);
                    String calculatedHash = calculateMD5Hash(eventJson);
                    // Constant-time comparison avoids leaking matching-prefix
                    // length through timing.
                    boolean hashValid = MessageDigest.isEqual(
                            receivedHash.getBytes(java.nio.charset.StandardCharsets.UTF_8),
                            calculatedHash.getBytes(java.nio.charset.StandardCharsets.UTF_8));
                    if (hashValid) {
                        processValidEvent(event);
                    } else {
                        handleInvalidHash(event, receivedHash, calculatedHash);
                    }
                } catch (Exception e) {
                    // Include record coordinates and the exception itself so
                    // failures are diagnosable (the message alone may be null).
                    System.err.println("Error processing message at "
                            + record.topic() + "-" + record.partition()
                            + "@" + record.offset() + ": " + e);
                }
            }
        }
    }

    /**
     * Hex-encoded MD5 digest of the UTF-8 bytes of {@code input}.
     * Must stay byte-for-byte compatible with the producer's implementation.
     */
    private String calculateMD5Hash(String input) throws Exception {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] hashBytes = md.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        StringBuilder hexString = new StringBuilder(hashBytes.length * 2);
        for (byte hashByte : hashBytes) {
            String hex = Integer.toHexString(0xff & hashByte);
            if (hex.length() == 1) {
                hexString.append('0');
            }
            hexString.append(hex);
        }
        return hexString.toString();
    }

    /** Hands a hash-verified event to downstream processing. */
    private void processValidEvent(OrderEvent event) {
        System.out.println("Processing valid event: " + event.getOrderId());
    }

    /** Reports a hash mismatch (or a missing hash, when both hashes are null). */
    private void handleInvalidHash(OrderEvent event, String receivedHash, String calculatedHash) {
        System.err.println("Invalid hash for event: " + event.getOrderId());
        System.err.println("Received hash: " + receivedHash);
        System.err.println("Calculated hash: " + calculatedHash);
    }
}
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Instant;
import java.util.UUID;
/**
 * JSON-serializable order event envelope: identifying fields, a domain
 * payload, routing metadata, and an integrity hash computed by the producer.
 * Mutable JavaBean shape is required by Jackson databinding.
 */
public class OrderEvent {
    private String eventType;
    private String version;
    private String timestamp;
    private String orderId;
    private String producerId;
    private OrderPayload payload;
    private EventMetadata metadata;
    private String hash;

    /** Initializes the event timestamp to the current instant (ISO-8601 UTC). */
    public OrderEvent() {
        this.timestamp = Instant.now().toString();
    }

    /**
     * Shallow copy of this event with the hash field left null, used by both
     * producer and consumer to digest identical JSON. Note payload/metadata
     * are shared references, not deep copies — safe as long as callers do not
     * mutate them between hashing and sending.
     */
    @JsonIgnore
    public OrderEvent copyWithoutHash() {
        OrderEvent copy = new OrderEvent();
        copy.setEventType(this.eventType);
        copy.setVersion(this.version);
        copy.setTimestamp(this.timestamp);
        copy.setOrderId(this.orderId);
        copy.setProducerId(this.producerId);
        copy.setPayload(this.payload);
        copy.setMetadata(this.metadata);
        // Hash is intentionally not copied.
        return copy;
    }

    // Accessors required by Jackson and by copyWithoutHash above; the original
    // left them as a placeholder comment, which made the setter calls (and the
    // producer/consumer's getOrderId/getHash/setHash/setProducerId calls)
    // unresolved.
    public String getEventType() { return eventType; }
    public void setEventType(String eventType) { this.eventType = eventType; }

    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    public String getTimestamp() { return timestamp; }
    public void setTimestamp(String timestamp) { this.timestamp = timestamp; }

    public String getOrderId() { return orderId; }
    public void setOrderId(String orderId) { this.orderId = orderId; }

    public String getProducerId() { return producerId; }
    public void setProducerId(String producerId) { this.producerId = producerId; }

    public OrderPayload getPayload() { return payload; }
    public void setPayload(OrderPayload payload) { this.payload = payload; }

    public EventMetadata getMetadata() { return metadata; }
    public void setMetadata(EventMetadata metadata) { this.metadata = metadata; }

    public String getHash() { return hash; }
    public void setHash(String hash) { this.hash = hash; }
}
- Message Integrity: The hash lets consumers detect accidental corruption or truncation in transit. Note that a plain MD5 hash does NOT protect against deliberate tampering — anyone who modifies the message can simply recompute the hash. For tamper resistance, use an HMAC with a shared secret or a digital signature instead.
- Producer Verification: The producerId identifies the claimed source, but an unkeyed hash cannot prove it. Cryptographic source verification requires a per-producer signing key (HMAC or asymmetric signature).
- Idempotency Support: The orderId and hash can be used to detect duplicates
- Audit Trail: All messages can be verified for compliance and audit purposes
Would you like me to elaborate on any specific aspect of this Kafka order management system?
Extended example for split/batch orders. The inline `//` annotations below are explanatory only — JSON does not support comments, so strip them before using this as a real payload:
{ "eventType": "ORDER_CREATED", "version": "1.0", "timestamp": "2025-03-10T08:45:32.215Z", "orderId": "ORD-12345678", "producerId": "TRADING-SVC-01", "batchId": "BATCH-12345678", "priority": "HIGH", "payload": [{ "orderId": "ORD-12345678", "clientId": "CLI-98765", "symbol": "AAPL", "quantity": 100, "price": 225.75, "side": "BUY", "orderType": "LIMIT", "timeInForce": "GTC", "status": "PENDING", "splitDetails": { "splitId": "SPLIT-987654", // Unique identifier for the split group "partNumber": 1, // Sequence in split order "totalParts": 2, // Total expected parts "nextPointer": "ORD-12345679", // Next order in sequence "prevPointer": "ORD-12345678" // Optional previous order reference } }], "batchProperties": { "size": 2, "compression": "GZIP", "retryPolicy": "EXPONENTIAL_BACKOFF", "maxAttempts": 5, "splitGroups": { "SPLIT-987654": { "totalExpected": 2, "receivedParts": 2, "complete": true, "originalOrderId": "ORD-12345678", // Original unsplit order ID "status": "FULFILLED" } } }, "metadata": { "correlationId": "a1b2c3d4-e5f6-7890-1234-567890abcdef", "origin": "WEB_PLATFORM" }, "hash": "63a9f0ea7bb98050796b649e85481845" }