From bc576e853600c3a1b3804ade342c3bd5fb628b2b Mon Sep 17 00:00:00 2001
From: Steven Liu
Date: Mon, 28 Sep 2020 11:55:58 -0700
Subject: [PATCH] Back out "disable dual writing, save models to Manifold"

Summary:
Original commit changeset: b6de7b4fea66

Differential Revision: D23969502

fbshipit-source-id: 2d57e6596f2f9a953476f7cfa5c4d81843eb6a8e
---
 pytext/exporters/exporter.py | 12 ++----------
 pytext/task/new_task.py      |  4 +---
 pytext/task/tasks.py         |  7 ++-----
 3 files changed, 5 insertions(+), 18 deletions(-)

diff --git a/pytext/exporters/exporter.py b/pytext/exporters/exporter.py
index e49155c9e..ffd36b44c 100644
--- a/pytext/exporters/exporter.py
+++ b/pytext/exporters/exporter.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
 
-import tempfile
 from typing import Callable, Dict, List, Tuple, Union
 
 import torch
@@ -13,7 +12,6 @@
 from pytext.data import CommonMetadata
 from pytext.fields import FieldMeta
 from pytext.utils import onnx
-from pytext.utils.file_io import PathManager
 from pytext.utils.usage import log_class_usage
 
 
@@ -180,17 +178,12 @@ def export_to_caffe2(
 
         print(f"Saving caffe2 model to: {export_path}")
 
-        # caffe2/onnx doesn't support internal uri(i.e. manifold)
-        # workaround: save to a temp file and copy to model_path
-        # this will be deprecated soon after caffe2 fully deprecated
-        _, temp_path = tempfile.mkstemp(prefix="pytext")
-
         c2_prepared = onnx.pytorch_to_caffe2(
             model,
             self.dummy_model_input,
             self.input_names,
             self.output_names,
-            temp_path,
+            export_path,
             export_onnx_path,
         )
         c2_prepared, final_input_names = self.prepend_operators(
@@ -215,10 +208,9 @@
             c2_prepared,
             final_input_names,
             final_out_names,
-            temp_path,
+            export_path,
             self.get_extra_params(),
         )
-        PathManager.copy_from_local(temp_path, export_path, overwrite=True)
         return final_out_names
 
     def export_to_metrics(self, model, metric_channels):
diff --git a/pytext/task/new_task.py b/pytext/task/new_task.py
index abb3df07e..2985e1c9c 100644
--- a/pytext/task/new_task.py
+++ b/pytext/task/new_task.py
@@ -14,7 +14,6 @@
 from pytext.models.model import BaseModel
 from pytext.trainers import TaskTrainer, TrainingState
 from pytext.utils import cuda, onnx
-from pytext.utils.file_io import PathManager
 from pytext.utils.usage import log_class_usage
 from torch import jit, sort
 
@@ -341,8 +340,7 @@ def torchscript_export(
         )
         if export_path is not None:
             print(f"Saving torchscript model to: {export_path}")
-            with PathManager.open(export_path, "wb") as f:
-                torch.jit.save(trace, f)
+            trace.save(export_path)
         return trace
 
 
diff --git a/pytext/task/tasks.py b/pytext/task/tasks.py
index 8f25618ca..33f1a7d2b 100644
--- a/pytext/task/tasks.py
+++ b/pytext/task/tasks.py
@@ -57,7 +57,6 @@
 from pytext.task.new_task import NewTask
 from pytext.trainers import EnsembleTrainer, HogwildTrainer, TaskTrainer
 from pytext.utils import cuda
-from pytext.utils.file_io import PathManager
 from torch import jit
 
 
@@ -274,8 +273,7 @@ def torchscript_export(self, model, export_path=None, **kwargs):
         )
         if export_path is not None:
             print(f"Saving torchscript model to: {export_path}")
-            with PathManager.open(export_path, "wb") as f:
-                torch.jit.save(trace, f)
+            trace.save(export_path)
         return trace
 
 
@@ -364,5 +362,4 @@ def torchscript_export(self, model, export_path=None, **kwargs):
         model.eval()
         if hasattr(model, "torchscriptify"):
            jit_module = model.torchscriptify()
-            with PathManager.open(export_path, "wb") as f:
-                torch.jit.save(jit_module, f)
+            jit_module.save(export_path)
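
Note: the behavioral difference this backout restores is in how the TorchScript artifact is written. `torch.jit.save` accepts a file-like object, so the reverted code could route writes through `PathManager` (which resolves internal URIs such as Manifold paths); `ScriptModule.save` takes a plain filesystem path. Below is a minimal sketch of the two save paths, not part of the patch itself; `ToyModule` and the `/tmp` paths are hypothetical stand-ins for a PyText model and its export destination.

```python
import torch


class ToyModule(torch.nn.Module):  # hypothetical stand-in for a PyText model
    def forward(self, x):
        return x + 1


module = torch.jit.script(ToyModule())

# After this backout: save directly to a local filesystem path.
module.save("/tmp/model.pt")

# Before this backout: stream through an open file handle, which lets an
# abstraction like PathManager.open(...) back the handle with non-local
# storage. Plain open() is used here as a local-only substitute.
with open("/tmp/model_via_handle.pt", "wb") as f:
    torch.jit.save(module, f)
```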