Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
syntax = "proto3";

package com.company;

import "google/protobuf/wrappers.proto";

option java_package = "com.company";
option java_outer_classname = "MyObjectData";

// One telemetry sample reported by a device at a site.
//
// Wrapper types (google.protobuf.*Value) give explicit presence so that an
// unset measurement can be distinguished from a legitimate zero reading.
// NOTE(review): in new proto3 code, `optional` scalars are preferred over
// wrappers; the wrappers are kept here because the downstream
// ProtobufConverter consumes the generated class as-is.
message MyObject {
  // Identifier of the site the device belongs to.
  int64 site_id = 1;

  // Time zone of the site — presumably an IANA name such as
  // "Europe/Berlin"; confirm the format with the producer.
  string time_zone = 2;

  // Device identifier.
  uint64 dev_id = 3;

  // Report identifier.
  uint64 rep_id = 4;

  // Device serial number.
  uint64 dev_sn = 5;

  // Sample timestamp — unit (seconds vs. milliseconds since epoch) is not
  // established by this schema; confirm with the producer.
  google.protobuf.UInt64Value timestamp = 6;

  google.protobuf.UInt32Value secs = 7;

  // Manufacturer identifier — TODO confirm semantics.
  google.protobuf.UInt64Value man_id = 8;

  // Analog readings; units are not established by this schema.
  google.protobuf.FloatValue panv = 9;
  google.protobuf.FloatValue outputv = 10;
  google.protobuf.FloatValue panelc = 11;
  google.protobuf.FloatValue ereset = 12;
  google.protobuf.FloatValue temp = 13;
  google.protobuf.FloatValue tempin = 14;
  google.protobuf.FloatValue tempout = 15;

  google.protobuf.UInt32Value sectelem = 16;
  google.protobuf.FloatValue energytelem = 17;

  // Error/status code — TODO confirm the code table.
  google.protobuf.UInt32Value ecode = 18;
}
# Kafka Connect standalone worker configuration.

# Kafka brokers used to bootstrap the cluster connection.
bootstrap.servers=k1:9092,k2:9092,k3:9092

# Record keys are plain strings; record values are protobuf-encoded and are
# decoded into schema-bearing Connect records by the Blue Apron converter.
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=com.blueapron.connect.protobuf.ProtobufConverter
# Fully-qualified generated Java class: outer class (java_outer_classname)
# '$' inner message type, per Java nested-class naming.
value.converter.protoClassName=com.company.MyObjectData$MyObject
key.converter.schemas.enable=false
value.converter.schemas.enable=true

# Standalone mode persists source offsets to a local file.
offset.storage.file.filename=/tmp/connect.offsets
offset.flush.interval.ms=10000

# Directory scanned for connector/converter plugin jars.
plugin.path=/usr/share/java
# HDFS sink connector: drains ObjectTopic into Parquet files on HDFS.
name=hdfs-sink
connector.class=io.confluent.connect.hdfs.HdfsSinkConnector
tasks.max=1
topics=ObjectTopic

# Hadoop client configuration and target HDFS location.
hadoop.conf.dir=/etc/hadoop
hdfs.url=hdfs://hdp-01:8020/user/hdfs/telems
hadoop.home=/etc/hadoop/client

# Number of records written per output file before it is committed.
# 3 is very small (useful for testing); raise it for production throughput.
flush.size=3

# Per-connector converter overrides (same values as the worker defaults, so
# this connector keeps working even if the worker defaults change).
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=com.blueapron.connect.protobuf.ProtobufConverter
value.converter.protoClassName=com.company.MyObjectData$MyObject

# Write records as Parquet.
format.class=io.confluent.connect.hdfs.parquet.ParquetFormat

# Rewrite the value schema name before writing; Parquet/Avro requires an
# Avro-compatible fully-qualified name.
# NOTE(review): "com.acme.avro.MyObject" does not match the proto package
# "com.company" used elsewhere in this paste — confirm this is intentional.
transforms=SetSchemaName
transforms.SetSchemaName.type=org.apache.kafka.connect.transforms.SetSchemaMetadata$Value
transforms.SetSchemaName.schema.name=com.acme.avro.MyObject
Add Comment
Please sign in to add a comment.