@@ -8,6 +8,7 @@ use rdkafka::{
     message::Message,
     producer::FutureProducer,
 };
+use std::fs;
 use tips_audit::{BundleEvent, BundleEventPublisher, KafkaBundleEventPublisher};
 use tips_datastore::{BundleDatastore, postgres::PostgresDatastore};
 use tokio::time::Duration;
@@ -20,18 +21,22 @@ struct Args {
     #[arg(long, env = "TIPS_INGRESS_WRITER_DATABASE_URL")]
     database_url: String,
 
-    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_BROKERS")]
-    kafka_brokers: String,
+    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_PROPERTIES_FILE")]
+    kafka_properties_file: String,
 
     #[arg(
         long,
-        env = "TIPS_INGRESS_WRITER_KAFKA_TOPIC",
-        default_value = "tips-ingress-rpc"
+        env = "TIPS_INGRESS_KAFKA_TOPIC",
+        default_value = "tips-ingress"
     )]
-    kafka_topic: String,
+    ingress_topic: String,
 
-    #[arg(long, env = "TIPS_INGRESS_WRITER_KAFKA_GROUP_ID")]
-    kafka_group_id: String,
+    #[arg(
+        long,
+        env = "TIPS_INGRESS_WRITER_AUDIT_TOPIC",
+        default_value = "tips-audit"
+    )]
+    audit_topic: String,
 
     #[arg(long, env = "TIPS_INGRESS_WRITER_LOG_LEVEL", default_value = "info")]
     log_level: String,
@@ -129,31 +134,20 @@ async fn main() -> Result<()> {
         .with_env_filter(&args.log_level)
         .init();
 
-    let mut config = ClientConfig::new();
-    config
-        .set("group.id", &args.kafka_group_id)
-        .set("bootstrap.servers", &args.kafka_brokers)
-        .set("auto.offset.reset", "earliest")
-        .set("enable.partition.eof", "false")
-        .set("session.timeout.ms", "6000")
-        .set("enable.auto.commit", "true");
-
-    let kafka_producer: FutureProducer = ClientConfig::new()
-        .set("bootstrap.servers", &args.kafka_brokers)
-        .set("message.timeout.ms", "5000")
-        .create()?;
-
-    let publisher = KafkaBundleEventPublisher::new(kafka_producer, "tips-audit".to_string());
+    let config = load_kafka_config_from_file(&args.kafka_properties_file)?;
+    let kafka_producer: FutureProducer = config.create()?;
+
+    let publisher = KafkaBundleEventPublisher::new(kafka_producer, args.audit_topic.clone());
     let consumer = config.create()?;
 
     let bundle_store = PostgresDatastore::connect(args.database_url).await?;
     bundle_store.run_migrations().await?;
 
-    let writer = IngressWriter::new(consumer, args.kafka_topic.clone(), bundle_store, publisher)?;
+    let writer = IngressWriter::new(consumer, args.ingress_topic.clone(), bundle_store, publisher)?;
 
     info!(
         "Ingress Writer service started, consuming from topic: {}",
-        args.kafka_topic
+        args.ingress_topic
     );
     loop {
         match writer.insert_bundle().await {
@@ -167,3 +161,22 @@ async fn main() -> Result<()> {
         }
     }
 }
+
+fn load_kafka_config_from_file(properties_file_path: &str) -> Result<ClientConfig> {
+    let kafka_properties = fs::read_to_string(properties_file_path)?;
+    info!("Kafka properties:\n{}", kafka_properties);
+
+    let mut client_config = ClientConfig::new();
+
+    for line in kafka_properties.lines() {
+        let line = line.trim();
+        if line.is_empty() || line.starts_with('#') {
+            continue;
+        }
+        if let Some((key, value)) = line.split_once('=') {
+            client_config.set(key.trim(), value.trim());
+        }
+    }
+
+    Ok(client_config)
+}
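
For reference, a minimal sketch of what the properties file consumed by load_kafka_config_from_file might contain. The keys mirror the settings the removed inline ClientConfig calls set; the broker address and group id below are placeholders, not values from this change:

# consumer settings (formerly set inline in main)
bootstrap.servers=localhost:9092
group.id=tips-ingress-writer
auto.offset.reset=earliest
enable.partition.eof=false
session.timeout.ms=6000
enable.auto.commit=true
# producer settings
message.timeout.ms=5000

Since ClientConfig::create takes &self, the one parsed config now backs both the FutureProducer and the consumer; librdkafka may warn that consumer-only keys such as group.id are ignored by the producer instance.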