
Kafka avro support (#527)

* Kafka rule extension. Added payload, key and headers for JSON.

* Kafka avro support.

Co-authored-by: Mittul Madaan <mittul.madaan@reedelsevier.com>
Branch: pull/528/head
Author: Mittul Madaan (committed via GitHub)
Commit: d3b3f76dcb
5 changed files:

  backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaAction.cs (4 changes)
  backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaActionHandler.cs (9 changes)
  backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaProducer.cs (110 changes)
  backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaProducerOptions.cs (11 changes)
  backend/extensions/Squidex.Extensions/Squidex.Extensions.csproj (2 changes)

backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaAction.cs (4 changes)

@@ -40,5 +40,9 @@ namespace Squidex.Extensions.Actions.Kafka
         [DataType(DataType.MultilineText)]
         [Formattable]
         public string Headers { get; set; }
+
+        [Display(Name = "Schema (Optional)", Description = "Define a specific AVRO schema in JSON format.")]
+        [DataType(DataType.MultilineText)]
+        public string Schema { get; set; }
     }
 }
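For context, the new Schema field expects an Avro record schema in JSON form. A minimal sketch of what a rule author might paste into it (the record and field names here are illustrative, not part of this change):

{
  "type": "record",
  "name": "SquidexContentEvent",
  "fields": [
    { "name": "key", "type": "string" },
    { "name": "payload", "type": "string" }
  ]
}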

backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaActionHandler.cs (9 changes)

@@ -54,7 +54,8 @@ namespace Squidex.Extensions.Actions.Kafka
                 TopicName = action.TopicName,
                 MessageKey = key,
                 MessageValue = value,
-                Headers = ParseHeaders(action.Headers, @event)
+                Headers = ParseHeaders(action.Headers, @event),
+                Schema = action.Schema
             };

             return (Description, ruleJob);
@@ -105,13 +106,13 @@ namespace Squidex.Extensions.Actions.Kafka
                 }
             }

-            await kafkaProducer.Send(job.TopicName, message);
+            await kafkaProducer.Send(job.TopicName, message, job.Schema);

             return Result.Success($"Event pushed to {job.TopicName} kafka topic.");
         }
         catch (Exception ex)
         {
-            return Result.Failed(ex, "Push to Kafka failed.");
+            return Result.Failed(ex, $"Push to Kafka failed: {ex}");
         }
     }
 }
@@ -125,5 +126,7 @@ namespace Squidex.Extensions.Actions.Kafka
         public string MessageValue { get; set; }

         public Dictionary<string, string> Headers { get; set; }
+
+        public string Schema { get; set; }
     }
 }

backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaProducer.cs (110 changes)

@@ -5,20 +5,34 @@
 // All rights reserved. Licensed under the MIT license.
 // ==========================================================================

+using System;
+using System.Collections.Generic;
 using System.Threading.Tasks;
+using Avro;
+using Avro.Generic;
 using Confluent.Kafka;
+using Confluent.SchemaRegistry;
+using Confluent.SchemaRegistry.Serdes;
+using GraphQL.Types;
 using Microsoft.Extensions.Options;
+using Squidex.Infrastructure.Json;
+using Squidex.Infrastructure.Json.Objects;
 using Squidex.Infrastructure.Log;

 namespace Squidex.Extensions.Actions.Kafka
 {
     public sealed class KafkaProducer
     {
-        private readonly IProducer<string, string> producer;
+        private readonly IProducer<string, string> textProducer;
+        private readonly IProducer<string, GenericRecord> avroProducer;
+        private readonly ISchemaRegistryClient schemaRegistry;
+        private readonly IJsonSerializer jsonSerializer;

-        public KafkaProducer(IOptions<KafkaProducerOptions> options, ISemanticLog log)
+        public KafkaProducer(IOptions<KafkaProducerOptions> options, ISemanticLog log, IJsonSerializer jsonSerializer)
         {
-            producer = new ProducerBuilder<string, string>(options.Value)
+            this.jsonSerializer = jsonSerializer;
+
+            textProducer = new ProducerBuilder<string, string>(options.Value)
                 .SetErrorHandler((p, error) =>
                 {
                     LogError(log, error);
@@ -30,6 +44,24 @@ namespace Squidex.Extensions.Actions.Kafka
                 .SetKeySerializer(Serializers.Utf8)
                 .SetValueSerializer(Serializers.Utf8)
                 .Build();
+
+            if (options.Value.IsSchemaRegistryConfigured())
+            {
+                schemaRegistry = new CachedSchemaRegistryClient(options.Value.SchemaRegistry);
+
+                avroProducer = new ProducerBuilder<string, GenericRecord>(options.Value)
+                    .SetErrorHandler((p, error) =>
+                    {
+                        LogError(log, error);
+                    })
+                    .SetLogHandler((p, message) =>
+                    {
+                        LogMessage(log, message);
+                    })
+                    .SetKeySerializer(Serializers.Utf8)
+                    .SetValueSerializer(new AvroSerializer<GenericRecord>(schemaRegistry, options.Value.AvroSerializer))
+                    .Build();
+            }
         }

         private static void LogMessage(ISemanticLog log, LogMessage message)
@@ -77,14 +109,80 @@ namespace Squidex.Extensions.Actions.Kafka
                 .WriteProperty("reason", error.Reason));
         }

-        public async Task<DeliveryResult<string, string>> Send(string topicName, Message<string, string> message)
+        public async Task<DeliveryResult<string, string>> Send(string topicName, Message<string, string> message, string schema)
         {
-            return await producer.ProduceAsync(topicName, message);
+            if (!string.IsNullOrWhiteSpace(schema))
+            {
+                var value = CreateAvroRecord(message.Value, schema);
+
+                var avroMessage = new Message<string, GenericRecord> { Key = message.Key, Headers = message.Headers, Value = value };
+
+                await avroProducer.ProduceAsync(topicName, avroMessage);
+            }
+
+            return await textProducer.ProduceAsync(topicName, message);
+        }
+
+        private GenericRecord CreateAvroRecord(string json, string avroSchema)
+        {
+            var schema = (RecordSchema)Avro.Schema.Parse(avroSchema);
+
+            var jsonObject = jsonSerializer.Deserialize<JsonObject>(json);
+
+            var result = (GenericRecord)GetValue(jsonObject, schema);
+
+            return result;
         }

         public void Dispose()
         {
-            producer?.Dispose();
+            textProducer?.Dispose();
+            avroProducer?.Dispose();
+        }
+
+        private object GetValue(IJsonValue value, Avro.Schema schema)
+        {
+            switch (value)
+            {
+                case JsonString s:
+                    return s.Value;
+                case JsonNumber n:
+                    return n.Value;
+                case JsonBoolean b:
+                    return b.Value;
+                case JsonObject o:
+                    {
+                        var recordSchema = (RecordSchema)schema;
+
+                        var result = new GenericRecord(recordSchema);
+
+                        foreach (var (key, childValue) in o)
+                        {
+                            if (recordSchema.TryGetField(key, out var field))
+                            {
+                                result.Add(key, GetValue(childValue, field.Schema));
+                            }
+                        }
+
+                        return result;
+                    }
+                case JsonArray a:
+                    {
+                        var arraySchema = (ArraySchema)schema;
+
+                        var result = new List<object>();
+
+                        foreach (var item in a)
+                        {
+                            result.Add(GetValue(item, arraySchema.ItemSchema));
+                        }
+
+                        return result.ToArray();
+                    }
+            }
+
+            return null;
         }
     }
 }
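For orientation, the CreateAvroRecord/GetValue pair above boils down to the standard Avro C# generic API: parse the schema, then populate a GenericRecord field by field. A minimal standalone sketch, with a made-up schema and value purely for illustration:

using Avro;
using Avro.Generic;

// Parse a record schema, as CreateAvroRecord does with the schema text
// configured on the rule action.
var schema = (RecordSchema)Schema.Parse(
    @"{""type"": ""record"", ""name"": ""Demo"", ""fields"": [{""name"": ""title"", ""type"": ""string""}]}");

// Populate a record, as GetValue does recursively for each JsonObject field
// that has a matching field in the schema.
var record = new GenericRecord(schema);
record.Add("title", "Hello Kafka");

// record can now serve as the Value of a Message<string, GenericRecord>,
// which AvroSerializer<GenericRecord> encodes against the schema registry.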

backend/extensions/Squidex.Extensions/Actions/Kafka/KafkaProducerOptions.cs (11 changes)

@@ -6,14 +6,25 @@
 // ==========================================================================

 using Confluent.Kafka;
+using Confluent.SchemaRegistry;
+using Confluent.SchemaRegistry.Serdes;

 namespace Squidex.Extensions.Actions.Kafka
 {
     public class KafkaProducerOptions : ProducerConfig
     {
+        public SchemaRegistryConfig SchemaRegistry { get; set; }
+
+        public AvroSerializerConfig AvroSerializer { get; set; }
+
         public bool IsProducerConfigured()
         {
             return !string.IsNullOrWhiteSpace(BootstrapServers);
         }
+
+        public bool IsSchemaRegistryConfigured()
+        {
+            return !string.IsNullOrWhiteSpace(SchemaRegistry?.Url);
+        }
     }
 }
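Since KafkaProducerOptions extends ProducerConfig, the new SchemaRegistry and AvroSerializer properties bind from the same options section as the existing Kafka settings. A hypothetical appsettings.json fragment (the "kafka" section name is an assumption; the actual path depends on how the host registers these options):

{
  "kafka": {
    "bootstrapServers": "localhost:9092",
    "schemaRegistry": {
      "url": "http://localhost:8081"
    },
    "avroSerializer": {
      "autoRegisterSchemas": true
    }
  }
}

With schemaRegistry.url set, IsSchemaRegistryConfigured() returns true and the Avro producer is built alongside the plain text producer.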

backend/extensions/Squidex.Extensions/Squidex.Extensions.csproj (2 changes)

@@ -9,7 +9,9 @@
   </ItemGroup>

   <ItemGroup>
     <PackageReference Include="Algolia.Search" Version="6.5.1" />
+    <PackageReference Include="Confluent.Apache.Avro" Version="1.7.7.7" />
     <PackageReference Include="Confluent.Kafka" Version="1.3.0" />
+    <PackageReference Include="Confluent.SchemaRegistry.Serdes" Version="1.3.0" />
     <PackageReference Include="CoreTweet" Version="1.0.0.483" />
     <PackageReference Include="Datadog.Trace" Version="1.14.2" />
     <PackageReference Include="Elasticsearch.Net" Version="7.6.1" />
