Commit d93a19a

minor

vdesabou committed Oct 3, 2023
1 parent 8e0d715 commit d93a19a
Showing 20 changed files with 355 additions and 383 deletions.
26 changes: 13 additions & 13 deletions connect/connect-gcp-spanner-sink/gcp-spanner-sink-proxy.sh
@@ -103,19 +103,19 @@ docker exec --privileged --user root connect bash -c "iptables -A INPUT -p tcp -
log "Creating GCP Spanner Sink connector"
playground connector create-or-update --connector gcp-spanner-sink << EOF
{
"connector.class": "io.confluent.connect.gcp.spanner.SpannerSinkConnector",
"tasks.max" : "1",
"topics" : "products",
"auto.create" : "true",
"table.name.format" : "kafka_\${topic}",
"gcp.spanner.instance.id" : "$INSTANCE",
"gcp.spanner.database.id" : "$DATABASE",
"gcp.spanner.credentials.path" : "/tmp/keyfile.json",
"gcp.spanner.proxy.url": "https://nginx-proxy:8888",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1"
}
"connector.class": "io.confluent.connect.gcp.spanner.SpannerSinkConnector",
"tasks.max" : "1",
"topics" : "products",
"auto.create" : "true",
"table.name.format" : "kafka_\${topic}",
"gcp.spanner.instance.id" : "$INSTANCE",
"gcp.spanner.database.id" : "$DATABASE",
"gcp.spanner.credentials.path" : "/tmp/keyfile.json",
"gcp.spanner.proxy.url": "https://nginx-proxy:8888",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1"
}
EOF
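
The iptables rule in the hunk header restricts direct network access from the connect container, presumably so that the only path to Spanner is the nginx proxy set via "gcp.spanner.proxy.url". A quick way to confirm the connector survives that restriction is to poll the Connect REST API; a minimal sketch, assuming the worker listens on its default port 8083 and jq is installed in the container:

# Both states should report "RUNNING" once the proxy route works.
docker exec connect curl -s http://localhost:8083/connectors/gcp-spanner-sink/status | jq '.connector.state, .tasks[].state'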

sleep 60
24 changes: 12 additions & 12 deletions connect/connect-gcp-spanner-sink/gcp-spanner-sink.sh
@@ -97,18 +97,18 @@ EOF
log "Creating GCP Spanner Sink connector"
playground connector create-or-update --connector gcp-spanner-sink << EOF
{
"connector.class": "io.confluent.connect.gcp.spanner.SpannerSinkConnector",
"tasks.max" : "1",
"topics" : "products",
"auto.create" : "true",
"table.name.format" : "kafka_\${topic}",
"gcp.spanner.instance.id" : "$INSTANCE",
"gcp.spanner.database.id" : "$DATABASE",
"gcp.spanner.credentials.path" : "/tmp/keyfile.json",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1"
}
"connector.class": "io.confluent.connect.gcp.spanner.SpannerSinkConnector",
"tasks.max" : "1",
"topics" : "products",
"auto.create" : "true",
"table.name.format" : "kafka_\${topic}",
"gcp.spanner.instance.id" : "$INSTANCE",
"gcp.spanner.database.id" : "$DATABASE",
"gcp.spanner.credentials.path" : "/tmp/keyfile.json",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1"
}
EOF
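
The connector reads from the "products" topic; a minimal seeding sketch with kafka-avro-console-producer (flag names vary across versions). The two-field schema is illustrative, not taken from the script:

docker exec -i connect kafka-avro-console-producer \
  --broker-list broker:9092 \
  --property schema.registry.url=http://schema-registry:8081 \
  --topic products \
  --property value.schema='{"type":"record","name":"product","fields":[{"name":"name","type":"string"},{"name":"price","type":"float"}]}' << EOF
{"name": "shoes", "price": 25.0}
{"name": "socks", "price": 4.5}
EOF

With "auto.create" enabled, the sink should create the target table (kafka_products, per "table.name.format") on first delivery.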

sleep 60
26 changes: 13 additions & 13 deletions connect/connect-hbase-sink/hbase-sink-2.2.4.sh
@@ -21,19 +21,19 @@ EOF
log "Creating HBase sink connector"
playground connector create-or-update --connector hbase-sink << EOF
{
"connector.class": "io.confluent.connect.hbase.HBaseSinkConnector",
"tasks.max": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"org.apache.kafka.connect.storage.StringConverter",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor":1,
"hbase.zookeeper.quorum": "hbase",
"hbase.zookeeper.property.clientPort": "2181",
"auto.create.tables": "true",
"auto.create.column.families": "false",
"table.name.format": "example_table",
"topics": "hbase-test"
}
"connector.class": "io.confluent.connect.hbase.HBaseSinkConnector",
"tasks.max": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"org.apache.kafka.connect.storage.StringConverter",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor":1,
"hbase.zookeeper.quorum": "hbase",
"hbase.zookeeper.property.clientPort": "2181",
"auto.create.tables": "true",
"auto.create.column.families": "false",
"table.name.format": "example_table",
"topics": "hbase-test"
}
EOF
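
Both converters are StringConverter, so plain strings are enough to exercise the connector. A hedged end-to-end sketch: produce keyed strings (the record key typically becomes the HBase row key), then scan the auto-created table from the hbase container:

docker exec -i connect kafka-console-producer \
  --broker-list broker:9092 \
  --topic hbase-test \
  --property parse.key=true \
  --property key.separator=, << EOF
key1,value1
key2,value2
key3,value3
EOF
docker exec hbase bash -c "echo \"scan 'example_table'\" | hbase shell"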

# Since 2.0.2:
36 changes: 18 additions & 18 deletions connect/connect-hdfs2-sink/hdfs2-sink-ha-kerberos.sh
@@ -39,24 +39,24 @@ fi
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs2-sink-ha-kerberos << EOF
{
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://sh",
"flush.size":"3",
"hadoop.conf.dir":"/opt/hadoop/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://sh",
"flush.size":"3",
"hadoop.conf.dir":"/opt/hadoop/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
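
Note that "store.url" is "hdfs://sh": an HA nameservice, not a hostname. The HDFS client resolves the active namenode from the configuration under "hadoop.conf.dir". A hedged sanity check from a Hadoop client container (assumed here to be named "hadoop"):

# Expected to print "sh"; the active/standby pair behind it can then be
# checked with "hdfs haadmin -getServiceState <nnid>" once authenticated.
docker exec hadoop bash -c "hdfs getconf -confKey dfs.nameservices"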


38 changes: 19 additions & 19 deletions connect/connect-hdfs2-sink/hdfs2-sink-kerberos.sh
@@ -53,25 +53,25 @@ fi
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs-sink-kerberos << EOF
{
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://hadoop.kerberos-demo.local:9000",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"kerberos.ticket.renew.period.ms": "60000",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://hadoop.kerberos-demo.local:9000",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"kerberos.ticket.renew.period.ms": "60000",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
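
"kerberos.ticket.renew.period.ms" is set to one minute here, so the worker re-logs-in from the keytab frequently. To read back what the sink wrote, authenticate with the same keytab and list the output directory; a sketch assuming the sink's default "topics" output directory and a client container named "hadoop":

# CONNECT_PRINCIPAL is a placeholder for the connect.hdfs.principal value above.
docker exec hadoop bash -c 'kinit -kt /tmp/connect.keytab "$CONNECT_PRINCIPAL" && hdfs dfs -ls -R /topics/test_hdfs'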

log "Sending messages to topic test_hdfs"
34 changes: 17 additions & 17 deletions connect/connect-hdfs2-sink/hdfs2-sink.sh
@@ -20,23 +20,23 @@ docker exec namenode bash -c "/opt/hadoop-2.7.4/bin/hdfs dfs -chmod 777 /"
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs-sink << EOF
{
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://namenode:8020",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/tmp",
"hive.integration": "true",
"hive.metastore.uris": "thrift://hive-metastore:9083",
"hive.database": "testhive",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://namenode:8020",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/tmp",
"hive.integration": "true",
"hive.metastore.uris": "thrift://hive-metastore:9083",
"hive.database": "testhive",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
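
With "hive.integration" enabled, the sink also registers one Hive table per topic in the "testhive" database. A hedged read-back via beeline, assuming a HiveServer2 container named "hive-server" on the default port 10000 (neither appears in the hunk):

docker exec hive-server beeline -u jdbc:hive2://localhost:10000/testhive -e "SELECT * FROM test_hdfs LIMIT 5;"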


44 changes: 22 additions & 22 deletions connect/connect-hdfs2-source/hdfs2-source-kerberos.sh
@@ -38,28 +38,28 @@ fi
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs-sink-kerberos << EOF
{
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://hadoop.kerberos.local:9000",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class":"io.confluent.connect.hdfs.partitioner.FieldPartitioner",
"partition.field.name":"f1",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://hadoop.kerberos.local:9000",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class":"io.confluent.connect.hdfs.partitioner.FieldPartitioner",
"partition.field.name":"f1",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
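
FieldPartitioner on "f1" writes one HDFS directory per distinct field value, instead of the flat partition=N layout of the DefaultPartitioner used elsewhere. An illustrative sketch of the expected tree, assuming the default "topics" output directory (filenames follow the sink's <topic>+<partition>+<startOffset>+<endOffset> scheme; exact offsets will differ):

# /topics/test_hdfs/f1=value1/test_hdfs+0+0000000000+0000000000.avro
# /topics/test_hdfs/f1=value2/test_hdfs+0+0000000001+0000000001.avro
# /topics/test_hdfs/f1=value3/test_hdfs+0+0000000002+0000000002.avro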

log "Sending messages to topic test_hdfs"
42 changes: 21 additions & 21 deletions connect/connect-hdfs2-source/hdfs2-source.sh
@@ -18,27 +18,27 @@ docker exec namenode bash -c "/opt/hadoop-2.7.4/bin/hdfs dfs -chmod 777 /"
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs-sink << EOF
{
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://namenode:8020",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class":"io.confluent.connect.hdfs.partitioner.FieldPartitioner",
"partition.field.name":"f1",
"rotate.interval.ms":"120000",
"logs.dir":"/tmp",
"hive.integration": "true",
"hive.metastore.uris": "thrift://hive-metastore:9083",
"hive.database": "testhive",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs.HdfsSinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://namenode:8020",
"flush.size":"3",
"hadoop.conf.dir":"/etc/hadoop/",
"partitioner.class":"io.confluent.connect.hdfs.partitioner.FieldPartitioner",
"partition.field.name":"f1",
"rotate.interval.ms":"120000",
"logs.dir":"/tmp",
"hive.integration": "true",
"hive.metastore.uris": "thrift://hive-metastore:9083",
"hive.database": "testhive",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
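
Despite the "HDFS Sink" log line, this script lives under connect-hdfs2-source: the sink only seeds HDFS with data for a source connector created later in the script (not shown in this hunk). A heavily hedged sketch of what that source config might look like; the class name is the Confluent HDFS 2 Source connector, but every other key and value below is an assumption:

playground connector create-or-update --connector hdfs2-source << EOF
{
  "connector.class": "io.confluent.connect.hdfs2.Hdfs2SourceConnector",
  "tasks.max": "1",
  "hdfs.url": "hdfs://namenode:8020",
  "hadoop.conf.dir": "/etc/hadoop/",
  "format.class": "io.confluent.connect.hdfs2.format.avro.AvroFormat",
  "confluent.topic.bootstrap.servers": "broker:9092",
  "confluent.topic.replication.factor": "1",
  "transforms": "AddPrefix",
  "transforms.AddPrefix.type": "org.apache.kafka.connect.transforms.RegexRouter",
  "transforms.AddPrefix.regex": ".*",
  "transforms.AddPrefix.replacement": "copy_of_\$0"
}
EOF

The RegexRouter transform renames the destination topic so the re-ingested records do not loop back into the original test_hdfs topic.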


42 changes: 21 additions & 21 deletions connect/connect-hdfs3-sink/hdfs3-sink-ha-kerberos.sh
@@ -33,27 +33,27 @@ fi
log "Creating HDFS Sink connector"
playground connector create-or-update --connector hdfs3-sink-ha-kerberos << EOF
{
"connector.class":"io.confluent.connect.hdfs3.Hdfs3SinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://sh",
"flush.size":"3",
"hadoop.conf.dir":"/opt/hadoop/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
"connector.class":"io.confluent.connect.hdfs3.Hdfs3SinkConnector",
"tasks.max":"1",
"topics":"test_hdfs",
"store.url":"hdfs://sh",
"flush.size":"3",
"hadoop.conf.dir":"/opt/hadoop/etc/hadoop/",
"partitioner.class": "io.confluent.connect.storage.partitioner.DefaultPartitioner",
"rotate.interval.ms":"120000",
"logs.dir":"/logs",
"hdfs.authentication.kerberos": "true",
"connect.hdfs.principal": "connect/[email protected]",
"connect.hdfs.keytab": "/tmp/connect.keytab",
"hdfs.namenode.principal": "nn/[email protected]",
"confluent.license": "",
"confluent.topic.bootstrap.servers": "broker:9092",
"confluent.topic.replication.factor": "1",
"key.converter":"org.apache.kafka.connect.storage.StringConverter",
"value.converter":"io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url":"http://schema-registry:8081",
"schema.compatibility":"BACKWARD"
}
EOF
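
This is the HDFS 3 variant of the HA Kerberos scenario above: the config is nearly identical, with only the connector class changed to Hdfs3SinkConnector. To inspect the Avro payloads the sink committed, one option is avro-tools; a sketch assuming a ticket was obtained as in the earlier kinit example, and noting that both the output filename and the jar location below are assumptions:

# Copy one committed file out of HDFS and decode it to JSON.
docker exec hadoop bash -c "hdfs dfs -copyToLocal /topics/test_hdfs/partition=0/test_hdfs+0+0000000000+0000000002.avro /tmp/f.avro && java -jar /tmp/avro-tools.jar tojson /tmp/f.avro"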

log "Sending messages to topic test_hdfs"