@@ -8,6 +8,7 @@ use rdkafka::{
     message::Message,
     producer::FutureProducer,
 };
+use std::fs;
 use tips_audit::{BundleEvent, BundleEventPublisher, KafkaBundleEventPublisher};
 use tips_datastore::{BundleDatastore, postgres::PostgresDatastore};
 use tokio::time::Duration;
@@ -20,18 +21,18 @@ struct Args {
     #[arg(long, env = "TIPS_INGRESS_WRITER_DATABASE_URL")]
     database_url: String,

-    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_BROKERS")]
-    kafka_brokers: String,
+    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_PROPERTIES_FILE")]
+    kafka_properties_file: String,
+
+    #[arg(long, env = "TIPS_INGRESS_KAFKA_TOPIC", default_value = "tips-ingress")]
+    ingress_topic: String,

     #[arg(
         long,
-        env = "TIPS_INGRESS_WRITER_KAFKA_TOPIC",
-        default_value = "tips-ingress-rpc"
+        env = "TIPS_INGRESS_WRITER_AUDIT_TOPIC",
+        default_value = "tips-audit"
     )]
-    kafka_topic: String,
-
-    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_GROUP_ID")]
-    kafka_group_id: String,
+    audit_topic: String,

     #[arg(long, env = "TIPS_INGRESS_WRITER_LOG_LEVEL", default_value = "info")]
     log_level: String,
@@ -129,31 +130,25 @@ async fn main() -> Result<()> {
         .with_env_filter(&args.log_level)
         .init();

-    let mut config = ClientConfig::new();
-    config
-        .set("group.id", &args.kafka_group_id)
-        .set("bootstrap.servers", &args.kafka_brokers)
-        .set("auto.offset.reset", "earliest")
-        .set("enable.partition.eof", "false")
-        .set("session.timeout.ms", "6000")
-        .set("enable.auto.commit", "true");
-
-    let kafka_producer: FutureProducer = ClientConfig::new()
-        .set("bootstrap.servers", &args.kafka_brokers)
-        .set("message.timeout.ms", "5000")
-        .create()?;
-
-    let publisher = KafkaBundleEventPublisher::new(kafka_producer, "tips-audit".to_string());
+    let config = load_kafka_config_from_file(&args.kafka_properties_file)?;
+    let kafka_producer: FutureProducer = config.create()?;
+
+    let publisher = KafkaBundleEventPublisher::new(kafka_producer, args.audit_topic.clone());
     let consumer = config.create()?;

     let bundle_store = PostgresDatastore::connect(args.database_url).await?;
     bundle_store.run_migrations().await?;

-    let writer = IngressWriter::new(consumer, args.kafka_topic.clone(), bundle_store, publisher)?;
+    let writer = IngressWriter::new(
+        consumer,
+        args.ingress_topic.clone(),
+        bundle_store,
+        publisher,
+    )?;

     info!(
         "Ingress Writer service started, consuming from topic: {}",
-        args.kafka_topic
+        args.ingress_topic
     );
     loop {
         match writer.insert_bundle().await {
@@ -167,3 +162,22 @@ async fn main() -> Result<()> {
         }
     }
 }
+
+fn load_kafka_config_from_file(properties_file_path: &str) -> Result<ClientConfig> {
+    let kafka_properties = fs::read_to_string(properties_file_path)?;
+    info!("Kafka properties:\n{}", kafka_properties);
+
+    let mut client_config = ClientConfig::new();
+
+    for line in kafka_properties.lines() {
+        let line = line.trim();
+        if line.is_empty() || line.starts_with('#') {
+            continue;
+        }
+        if let Some((key, value)) = line.split_once('=') {
+            client_config.set(key.trim(), value.trim());
+        }
+    }
+
+    Ok(client_config)
+}