diff --git a/recommendation/dlrm/pytorch/python/criteo.py b/recommendation/dlrm/pytorch/python/criteo.py index de08b82f1..77c7184c2 100755 --- a/recommendation/dlrm/pytorch/python/criteo.py +++ b/recommendation/dlrm/pytorch/python/criteo.py @@ -253,7 +253,10 @@ def load_query_samples(self, sample_list): s = self.random_offsets[l] e = self.random_offsets[l+1] - ls = [self.test_data[i] for i in range(s, e)] + if self.use_mlperf_bin_loader and self.samples_to_aggregate > 1: + ls = [self.test_data[l]] + else: + ls = [self.test_data[i] for i in range(s, e)] if self.use_mlperf_bin_loader: # NOTE: in binary dataset the values are transformed ls_t = list(zip(*ls)) diff --git a/v0.5/recommendation/python/backend_onnxruntime.py b/v0.5/recommendation/python/backend_onnxruntime.py index 189484da8..6c1198a84 100755 --- a/v0.5/recommendation/python/backend_onnxruntime.py +++ b/v0.5/recommendation/python/backend_onnxruntime.py @@ -45,7 +45,16 @@ def predict(self, batch_dense_X, batch_lS_o, batch_lS_i): # print("onnx predict") # print(self.inputs) # print(self.outputs) - + + ''' + incoming_bs = batch_dense_X.shape[0] + model_saved_bs = 2048 + if (incoming_bs != model_saved_bs): + print("WARNING: mismatch between incoming " + str(incoming_bs) + " and model saved " + str(model_saved_bs) + " mini-batch size") + fake_output = torch.zeros(size=(incoming_bs,1), dtype=torch.float32) + return fake_output + ''' + dict_inputs = {} # Dmitriy's approach to build dictionaries