Skip to content

Commit

Permalink
repro
Browse files Browse the repository at this point in the history
  • Loading branch information
vdesabou committed Dec 15, 2023
1 parent 8c7099c commit 1ac7098
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 10 deletions.
1 change: 0 additions & 1 deletion connect/connect-graphdb-sink/docker-compose.plaintext.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,5 @@ services:

connect:
environment:
#CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components/confluentinc-kafka-connect-http
CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components/ontotext-kafka-sink-graphdb

46 changes: 37 additions & 9 deletions connect/connect-graphdb-sink/graphdb-sink.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,22 +27,23 @@ function wait_service {
}

function create_graphdb_repo {
  # Create the GraphDB repository "test" via the REST API, but only when it
  # does not already exist: `curl --fail` makes the GET return non-zero (and
  # print nothing, via &>/dev/null) when the repository is missing.
  # NOTE(review): the scraped diff showed this guarded block duplicated
  # (old + new copy of an indentation-only hunk); a single copy is kept here.
  if ! curl --fail -X GET --header 'Accept: application/json' http://localhost:7200/rest/repositories/test &> /dev/null; then
    curl 'http://localhost:7200/rest/repositories' \
      -H 'Accept: application/json, text/plain, */*' \
      -H 'Content-Type: application/json;charset=UTF-8' \
      -d '{"id": "test", "params": {"imports": {"name": "imports", "label": "Imported RDF files('\'';'\'' delimited)", "value": ""}, "defaultNS": {"name": "defaultNS", "label": "Default namespaces for imports('\'';'\'' delimited)", "value": ""}}, "title": "", "type": "graphdb", "location": ""}'
  fi
}


# Bring up the plaintext test environment using the compose override that this
# commit edits (it sets CONNECT_PLUGIN_PATH to the ontotext-kafka-sink-graphdb
# plugin — see docker-compose.plaintext.yml above).
# NOTE(review): the scraped diff also showed the removed
# docker-compose.plaintext.no-auth.yml invocation; only the committed one is kept.
${DIR}/../../environment/plaintext/start.sh "${PWD}/docker-compose.plaintext.yml"

# Block until GraphDB's REST endpoint answers before touching the repository API.
wait_service 'http://localhost:7200/protocol'

create_graphdb_repo

# NOTE(review): this span is a rendered diff fragment — the removed
# http-source lines and the added graphdb-sink lines appear together, and the
# "Expand All" marker below elides most of the connector JSON config
# (original file lines 29-61). Not runnable as-is; consult the full file.
log "Creating http-source connector"
playground connector create-or-update --connector http-source << EOF
log "Creating graphdb-sink connector"
playground connector create-or-update --connector graphdb-sink << EOF
{
"connector.class":"com.ontotext.kafka.GraphDBSinkConnector",
"key.converter": "com.ontotext.kafka.convert.DirectRDFConverter",
Expand All @@ -61,9 +62,36 @@ playground connector create-or-update --connector http-source << EOF
}
EOF

# [2023-12-15 14:51:28,134] ERROR [graphdb-sink|worker] [Worker clientId=connect-adminclient-producer, groupId=connect-cluster] Failed to start connector 'graphdb-sink' (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1928)
# org.apache.kafka.connect.errors.ConnectException: Failed to start connector: graphdb-sink
# at org.apache.kafka.connect.runtime.distributed.DistributedHerder.lambda$startConnector$36(DistributedHerder.java:1899)
# at org.apache.kafka.connect.runtime.WorkerConnector.doTransitionTo(WorkerConnector.java:361)
# at org.apache.kafka.connect.runtime.WorkerConnector.doRun(WorkerConnector.java:145)
# at org.apache.kafka.connect.runtime.WorkerConnector.run(WorkerConnector.java:123)
# at org.apache.kafka.connect.runtime.isolation.Plugins.lambda$withClassLoader$1(Plugins.java:181)
# at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
# at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
# at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
# at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
# at java.base/java.lang.Thread.run(Thread.java:829)
# Caused by: org.apache.kafka.connect.errors.ConnectException: Failed to transition connector graphdb-sink to state STARTED
# ... 9 more
# Caused by: java.lang.NoSuchMethodError: 'void org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator.<init>(long, long, org.apache.kafka.connect.runtime.errors.ToleranceType, org.apache.kafka.common.utils.Time)'
# at com.ontotext.kafka.operation.GraphDBOperator.<init>(GraphDBOperator.java:34)
# at com.ontotext.kafka.service.GraphDBService.initialize(GraphDBService.java:45)
# at com.ontotext.kafka.GraphDBSinkConnector.start(GraphDBSinkConnector.java:58)
# at org.apache.kafka.connect.runtime.WorkerConnector.doStart(WorkerConnector.java:193)
# at org.apache.kafka.connect.runtime.WorkerConnector.start(WorkerConnector.java:218)
# at org.apache.kafka.connect.runtime.WorkerConnector.doTransitionTo(WorkerConnector.java:377)
# at org.apache.kafka.connect.runtime.WorkerConnector.doTransitionTo(WorkerConnector.java:358)
# ... 8 more

# Restart the connector — NOTE(review): appears to be a workaround for the
# NoSuchMethodError logged in the commented stack trace above (connector fails
# its first start); confirm the restart actually brings it to RUNNING.
playground connector restart

# Send 10 copies of a single N-Triples statement to the "test" topic; the
# quoted 'EOF' delimiter keeps the payload literal (no shell expansion).
playground topic produce -t test --nb-messages 10 << 'EOF'
<urn:a> <urn:b> <urn:c> .
EOF

# Manual verification step: query the repository through the GraphDB workbench.
log "go to http://127.0.0.1:7200/sparql select the test repository and execute select * where { <urn:a> ?p ?o . }"


0 comments on commit 1ac7098

Please sign in to comment.