pl.touk.nussknacker.engine.schemedkafka.schemaregistry.confluent.serialization
AbstractConfluentKafkaAvroSerializer
class AbstractConfluentKafkaAvroSerializer extends AbstractKafkaAvroSerializer with DatumReaderWriterMixin
Abstract Confluent serializer class. The serialization algorithm is copied from AbstractKafkaAvroSerializer.serializeImpl. The serializer tries to convert the data (in most cases a GenericContainer) to the indicated schema.
A problem arises when the GenericContainer carries a different schema than the target schema: the DatumWriter throws an exception because the data does not provide some field required by the schema. In that case we try to convert the data to the provided schema using the AvroSchemaEvolution.alignRecordToSchema implementation (see the sketch below).
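A minimal sketch of that fallback, assuming an `alignToSchema` function standing in for AvroSchemaEvolution.alignRecordToSchema. The names `writeWithFallback` and `alignToSchema` are hypothetical; the real write path additionally handles the Confluent wire-format header and schema id:

```scala
import java.io.ByteArrayOutputStream

import org.apache.avro.{AvroRuntimeException, Schema}
import org.apache.avro.generic.{GenericContainer, GenericDatumWriter}
import org.apache.avro.io.EncoderFactory

// Hypothetical illustration of the fallback: write with the target schema,
// and if the record's own schema does not fit, align it and retry.
def writeWithFallback(
    data: Any,
    targetSchema: Schema,
    alignToSchema: (GenericContainer, Schema) => Any // stand-in for AvroSchemaEvolution.alignRecordToSchema
): Array[Byte] = {
  def write(record: Any): Array[Byte] = {
    val out     = new ByteArrayOutputStream()
    val encoder = EncoderFactory.get().binaryEncoder(out, null)
    new GenericDatumWriter[Any](targetSchema).write(record, encoder)
    encoder.flush()
    out.toByteArray
  }

  try write(data)
  catch {
    // Thrown e.g. when the record is missing a field required by targetSchema.
    case _: AvroRuntimeException =>
      data match {
        case container: GenericContainer => write(alignToSchema(container, targetSchema))
        case other                       => throw new IllegalArgumentException(s"Cannot align non-Avro data: $other")
      }
  }
}
```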
Inheritance (linearization): AbstractConfluentKafkaAvroSerializer → DatumReaderWriterMixin → AbstractKafkaAvroSerializer → AbstractKafkaSchemaSerDe → AnyRef → Any
Instance Constructors
- new AbstractConfluentKafkaAvroSerializer(avroSchemaEvolution: AvroSchemaEvolution)
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native() @HotSpotIntrinsicCandidate()
- def configure(arg0: KafkaAvroSerializerConfig): Unit
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaAvroSerializer
- def configureClientProperties(arg0: AbstractKafkaSchemaSerDeConfig, arg1: SchemaProvider): Unit
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- def createDatumReader(writerSchema: Schema, readerSchema: Schema, useSchemaReflection: Boolean, useSpecificAvroReader: Boolean): DatumReader[AnyRef]
  - Definition Classes: DatumReaderWriterMixin
- def createDatumWriter(record: Any, schema: Schema, useSchemaReflection: Boolean): GenericDatumWriter[Any]
  - Definition Classes: DatumReaderWriterMixin
- val encoderFactory: EncoderFactory
  - Attributes: protected
- def encoderToUse(schema: Schema, out: OutputStream): Encoder
  - Attributes: protected
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def getByteBuffer(arg0: Array[Byte]): ByteBuffer
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native() @HotSpotIntrinsicCandidate()
- def getContextName(arg0: String, arg1: String): String
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- def getContextName(arg0: String): String
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- def getDatumWriter(arg0: Any, arg1: Schema): DatumWriter[_]
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaAvroSerializer
- def getOldSubjectName(arg0: Any): String
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- def getSchemaById(arg0: Int): ParsedSchema
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- def getSchemaBySubjectAndId(arg0: String, arg1: Int): ParsedSchema
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- def getSubjectName(arg0: String, arg1: Boolean, arg2: Any, arg3: ParsedSchema): String
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native() @HotSpotIntrinsicCandidate()
- def isDeprecatedSubjectNameStrategy(arg0: Boolean): Boolean
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def lookupLatestVersion(arg0: String, arg1: ParsedSchema, arg2: Boolean): ParsedSchema
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- def lookupSchemaBySubjectAndId(arg0: String, arg1: Int, arg2: ParsedSchema, arg3: Boolean): ParsedSchema
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native() @HotSpotIntrinsicCandidate()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native() @HotSpotIntrinsicCandidate()
- val primitives: Map[String, Schema]
  Used to check whether writerSchema is a primitive schema when creating a DatumReader (createDatumReader); see the sketch below.
  - Attributes: protected
  - Definition Classes: DatumReaderWriterMixin
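A minimal sketch of how a map like this could be built and consulted; the keys and contents shown here are an assumption for illustration, the real map is provided by DatumReaderWriterMixin:

```scala
import org.apache.avro.Schema

// Assumed layout: Avro primitive type name -> primitive schema.
val primitives: Map[String, Schema] =
  List(
    Schema.Type.NULL, Schema.Type.BOOLEAN, Schema.Type.INT, Schema.Type.LONG,
    Schema.Type.FLOAT, Schema.Type.DOUBLE, Schema.Type.BYTES, Schema.Type.STRING
  ).map(t => t.getName -> Schema.create(t)).toMap

// Example check performed before creating a DatumReader.
def isPrimitive(writerSchema: Schema): Boolean =
  primitives.contains(writerSchema.getType.getName)
```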
- def register(arg0: String, arg1: ParsedSchema, arg2: Boolean): Int
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- def register(arg0: String, arg1: ParsedSchema): Int
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... )
- def serialize(avroSchemaOpt: Option[AvroSchema], topic: String, data: Any, isKey: Boolean): Array[Byte]
- def serializeImpl(arg0: String, arg1: Any, arg2: AvroSchema): Array[Byte]
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaAvroSerializer
  - Annotations: @throws( ... ) @throws( ... )
- def serializerConfig(arg0: Properties): KafkaAvroSerializerConfig
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaAvroSerializer
- def serializerConfig(arg0: Map[String, _]): KafkaAvroSerializerConfig
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaAvroSerializer
- def strategyUsesSchema(arg0: Boolean): Boolean
  - Attributes: protected[serializers]
  - Definition Classes: AbstractKafkaSchemaSerDe
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- def writeData(data: Any, avroSchema: Schema, schemaId: Int): Array[Byte]
  - Attributes: protected
- def writeHeader(data: Any, avroSchema: Schema, schemaId: Int, out: OutputStream): Unit
  - Attributes: protected
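The writeHeader and writeData members above deal with the Confluent wire format: a zero magic byte, a 4-byte big-endian schema id, then the Avro-encoded payload. A minimal sketch of that header layout, shown as an illustration of the format rather than this class's actual code:

```scala
import java.io.OutputStream
import java.nio.ByteBuffer

// Confluent wire-format header: magic byte 0, then the schema id as 4 big-endian bytes.
def writeConfluentHeader(schemaId: Int, out: OutputStream): Unit = {
  out.write(0)                                               // magic byte
  out.write(ByteBuffer.allocate(4).putInt(schemaId).array()) // 4-byte schema id
}
```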
Deprecated Value Members
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] ) @Deprecated @deprecated
  - Deprecated: (Since version ) see corresponding Javadoc for more information.
- def getById(arg0: Int): Schema
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... ) @Deprecated @deprecated
  - Deprecated: (Since version ) see corresponding Javadoc for more information.
- def getBySubjectAndId(arg0: String, arg1: Int): Schema
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... ) @Deprecated @deprecated
  - Deprecated: (Since version ) see corresponding Javadoc for more information.
- def register(arg0: String, arg1: Schema): Int
  - Definition Classes: AbstractKafkaSchemaSerDe
  - Annotations: @throws( classOf[java.io.IOException] ) @throws( ... ) @Deprecated @deprecated
  - Deprecated: (Since version ) see corresponding Javadoc for more information.