I'm trying to send two custom events to Kafka. The idea is to send each event to its own topic, but at the moment I receive both events in both topics.
I have configured two mappings with different schemas and two sinks (one for each mapping).
Here is my divolte-collector.conf:
divolte {
global {
# Global Kafka flusher settings shared by all Kafka sinks.
kafka {
enabled = true
threads = 2
buffer_size = 1048576
# Passed through verbatim as Kafka producer properties.
producer = {
bootstrap.servers = "localhost:9092"
acks = 1
retries = 0
compression.type = lz4
max.in.flight.requests.per.connection = 1
}
}
}
# NOTE(review): both mappings below consume from the SAME source ([browser]).
# Every event received by that source is run through every mapping attached to
# it, and each mapping forwards every record it produces to its own sinks —
# presumably this is why both events show up in both topics. The mapping
# scripts would need to discriminate on event type; verify against the Divolte
# mapping documentation (eventType()).
mappings {
a_mapping = {
schema_file = "path-to/conf/MyEventRecord.avsc"
mapping_script_file = "path-to/conf/mapping.groovy"
sources = [browser]
sinks = [kafka1]
}
b_mapping = {
schema_file = "path-to/conf/MyEventRecord2.avsc"
mapping_script_file = "path-to/conf/mapping2.groovy"
sources = [browser]
sinks = [kafka2]
}
}
# One Kafka sink per topic; each mapping writes to exactly one of these.
sinks {
kafka1 {
type = kafka
topic = topic-1
}
kafka2 {
type = kafka
topic = topic-2
}
}
}
My Groovy mapping file (mapping.groovy):
// Divolte mapping DSL: copies the named event parameters onto the
// same-named fields of the Avro record declared in the schema file.
// NOTE(review): as written this maps fields unconditionally — it does not
// filter which incoming events produce a record for this mapping's sinks.
// Presumably every browser event (including the other custom event type)
// passes through here; confirm whether wrapping these statements in a
// `when eventType().equalTo('...') apply { ... }` guard is needed.
mapping {
map eventParameter('js_error_msg') onto 'js_error_msg'
map eventParameter('js_error_level') onto 'js_error_level'
map eventParameter('js_url') onto 'js_url'
map eventParameter('js_line') onto 'js_line'
map eventParameter('js_column') onto 'js_column'
map eventParameter('js_is_error_caught') onto 'js_is_error_caught'
}
My Avro schema file (MyEventRecord.avsc):
{
"name": "tracking",
"type": "record",
"fields": [
{ "name": "js_error_msg", "type": ["null", "string"], "default": null },
{ "name": "js_error_level", "type": ["null", "string"], "default": null },
{ "name": "js_url", "type": ["null", "string"], "default": null },
{ "name": "js_line", "type": ["null", "string"], "default": null },
{ "name": "js_column", "type": ["null", "string"], "default": null },
{ "name": "js_is_error_caught", "type": ["null", "string"], "default": null }
]
}