@@ -96,7 +96,7 @@ def _run_in_memory_and_yield_progress(
 
         model_outputs = []
         timestamps = []
-        run_exceptions = set()
+        run_exceptions = []
         run_cost = 0
         total_rows = len(input_data)
         current_row = 0
@@ -121,24 +121,43 @@ def _run_in_memory_and_yield_progress(
 
             model_outputs.append(output)
             run_cost += cost
-            run_exceptions.update(exceptions)
+            run_exceptions.append(exceptions)
             timestamps.append(datetime.datetime.utcnow().isoformat())
             current_row += 1
 
             yield pd.DataFrame(
-                {"output": model_outputs, "output_time_utc": timestamps}
+                {
+                    "output": model_outputs,
+                    "output_time_utc": timestamps,
+                    "exceptions": run_exceptions,
+                }
             ), current_row / total_rows
 
+        if (
+            len(run_exceptions) > 0
+            and None not in run_exceptions
+            and len(set(run_exceptions)) == 1
+        ):
+            raise openlayer_exceptions.OpenlayerLlmException(
+                f"Calculating all outputs failed with: {run_exceptions[0]}"
+            )
+
         self.logger.info("Successfully ran data through the model!")
 
-        self._report_exceptions(run_exceptions)
+        self._report_exceptions(set(run_exceptions))
         self.cost_estimates.append(run_cost)
 
         yield pd.DataFrame(
-            {"output": model_outputs, "output_time_utc": timestamps}
+            {
+                "output": model_outputs,
+                "output_time_utc": timestamps,
+                "exceptions": run_exceptions,
+            }
         ), 1.0
 
-    def _run_single_input(self, input_data_row: pd.Series) -> Tuple[str, float, set]:
+    def _run_single_input(
+        self, input_data_row: pd.Series
+    ) -> Tuple[str, float, Optional[Exception]]:
         """Runs the LLM on a single row of input data.
 
         Returns a tuple of the output, cost, and exceptions encountered.
@@ -151,10 +170,10 @@ def _run_single_input(self, input_data_row: pd.Series) -> Tuple[str, float, set]
 
         try:
             outputs = self._get_llm_output(llm_input)
-            return outputs["output"], outputs["cost"], set()
+            return outputs["output"], outputs["cost"], None
         # pylint: disable=broad-except
         except Exception as exc:
-            return None, 0, {exc}
+            return None, 0, exc
 
     def _inject_prompt(self, input_variables_dict: dict) -> List[Dict[str, str]]:
         """Injects the input variables into the prompt template.
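
For reference, a minimal standalone sketch of the per-row contract this diff introduces: each row returns (output, cost, Optional[Exception]), failures are collected in a list instead of deduplicated into a set, and the run is aborted only when every row failed with the same exception. The names run_row, run_all, and AllRowsFailedError are hypothetical stand-ins for _run_single_input, _run_in_memory_and_yield_progress, and openlayer_exceptions.OpenlayerLlmException; this is an illustration of the pattern, not the library's code.

from typing import List, Optional, Tuple


class AllRowsFailedError(Exception):
    """Hypothetical stand-in for openlayer_exceptions.OpenlayerLlmException."""


def run_row(value: int) -> Tuple[Optional[str], float, Optional[Exception]]:
    """Mirrors the new per-row contract: (output, cost, exception-or-None)."""
    try:
        if value < 0:
            raise ValueError(f"negative input: {value}")
        return f"output-{value}", 0.01, None
    # pylint: disable=broad-except
    except Exception as exc:
        return None, 0.0, exc


def run_all(values: List[int]) -> List[Optional[str]]:
    """Collects per-row exceptions in a list, like the updated runner."""
    outputs: List[Optional[str]] = []
    exceptions: List[Optional[Exception]] = []
    for value in values:
        output, _cost, exc = run_row(value)
        outputs.append(output)
        exceptions.append(exc)

    # Same condition as the diff: abort only if no row succeeded (no None
    # entries) and every failure is the same exception object.
    if len(exceptions) > 0 and None not in exceptions and len(set(exceptions)) == 1:
        raise AllRowsFailedError(f"Calculating all outputs failed with: {exceptions[0]}")
    return outputs


print(run_all([1, -2, 3]))  # ['output-1', None, 'output-3']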