Fix some bugs in onnxmlirtorch package #3319
Changes from all commits: bb991fb, cd1b1f3, 1cce5f0, f0e8687, 282c221, 9374f4d
Changes to the first file:

```diff
@@ -58,6 +58,7 @@ class InferenceSession:
     def __init__(self, model_path, **kwargs):
         self.debug = False
         self.session = None
+        self.output_dir = tempfile.TemporaryDirectory()
         self.handleParameters(model_path, **kwargs)
         if self.session is not None:
             return
```

Review comment: We want to keep this temp folder, so move it to the class member. Otherwise, the temp folder is cleaned up automatically.
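For context, this is the standard-library behaviour the review comment relies on: `tempfile.TemporaryDirectory` deletes its directory when the object is finalized, so only a reference held on the instance keeps the folder alive. A minimal stand-alone sketch (cleanup is immediate here under CPython's reference counting):

```python
import os
import tempfile

# Throwaway object: nothing references it after .name is read, so in CPython
# its finalizer runs right away and the directory is removed.
transient = tempfile.TemporaryDirectory().name
print(os.path.isdir(transient))          # False

class Holder:
    def __init__(self):
        # Stored on the instance: the directory lives as long as the object,
        # which is what the PR does with self.output_dir.
        self.output_dir = tempfile.TemporaryDirectory()

h = Holder()
print(os.path.isdir(h.output_dir.name))  # True while h is alive
```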
```diff
@@ -117,7 +118,7 @@ def handleParameters(self, model_path, **kwargs):
             self.compiled_model += ".so"
             self.output_dirname = os.path.dirname(self.compiled_model)
         else:
-            self.output_dirname = tempfile.TemporaryDirectory().name
+            self.output_dirname = self.output_dir.name
             self.compiled_model = os.path.join(
                 self.output_dirname, self.model_basename.removesuffix(self.model_suffix)
             )
```
```diff
@@ -292,7 +293,7 @@ def Compile(self):
 
     def getSession(self):
         # When the script is used in package onnxmlir, the files to be imported
-        # are within the package. Path in the pakcage should be used.
+        # are within the package. Path in the package should be used.
         # Otherwise, env variable ONNX_MLIR_HOME is used to for import path
         if __package__ == "onnxmlir" or __package__ == "onnxmlirtorch":
             try:
```
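The comment fixed above describes a two-way lookup for the runtime bindings: when running from inside the onnxmlir or onnxmlirtorch package, import relative to the package; otherwise use the ONNX_MLIR_HOME environment variable to locate them. A hedged sketch of that pattern; the module name `PyRuntime` and the `lib` subdirectory are assumptions about the layout, not details taken from this diff:

```python
import importlib
import os
import sys

def load_pyruntime():
    """Load the runtime bindings, preferring the package-local copy."""
    try:
        # Package-local path (when running as onnxmlir / onnxmlirtorch).
        return importlib.import_module("onnxmlir.PyRuntime")  # assumed location
    except ImportError:
        # Fall back to a build or install tree pointed to by ONNX_MLIR_HOME.
        lib_dir = os.path.join(os.environ["ONNX_MLIR_HOME"], "lib")  # assumed layout
        if lib_dir not in sys.path:
            sys.path.append(lib_dir)
        return importlib.import_module("PyRuntime")
```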
Changes to the second file:

```diff
@@ -123,13 +123,16 @@ class config:
     cache_size = 3
 
 
+glocalSessionCache = SessionCache(config.cache_size)
+
+
 class ONNXMLIRTorch:
     def __init__(self, torch_model, **kwargs):
         self.torch_model = torch_model
         # Temporary directory
         self.workdir = tempfile.TemporaryDirectory()
         self.default_model_name = "model"
-        self.sessionCache = SessionCache(config.cache_size)
+        self.sessionCache = glocalSessionCache
         if "compile_tag" in kwargs.keys():
             self.tag = kwargs["compile_tag"]
         else:
```

Review comment: Make it global. Otherwise, the cache is local to each PyTorch call to the backend.
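The point of the module-level cache is that every ONNXMLIRTorch instance created by the PyTorch backend now shares one session cache, so a model compiled in an earlier call can be reused later. A minimal sketch of the idea, using a simplified dict-backed cache; the package's real SessionCache may differ:

```python
# Simplified stand-in for the package's SessionCache (hypothetical).
class SimpleSessionCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.entries = {}          # key -> compiled session

    def get(self, key):
        return self.entries.get(key)

    def put(self, key, session):
        if len(self.entries) >= self.capacity:
            self.entries.pop(next(iter(self.entries)))  # evict oldest insertion
        self.entries[key] = session


# Module-level instance: shared by every wrapper object.
globalSessionCache = SimpleSessionCache(3)


class Wrapper:  # hypothetical stand-in for ONNXMLIRTorch
    def __init__(self):
        # A per-instance cache (SimpleSessionCache(3) here) would start empty
        # each time the backend builds a new wrapper, so nothing would ever hit.
        self.sessionCache = globalSessionCache
```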
```diff
@@ -195,5 +198,5 @@ def forward(self, *args, **kwargs):
         _, sess = cached_session
 
         # Run the inference
-        outputs = sess.run(None, np_args)
+        outputs = sess.run(np_args)
         return [torch.from_numpy(output) for output in outputs]
```

Review comment: The libs folder was missing, so OMExecutionSession was not found.
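The dropped `None` matches the call convention of onnx-mlir's Python runtime session, whose `run()` takes only the list of input arrays, unlike onnxruntime's `InferenceSession.run(output_names, input_feed)`. A small usage sketch under the assumption that the cached `sess` is an onnx-mlir `OMExecutionSession` (the class the review note above refers to), that a compiled `model.so` exists, and that the runtime's `PyRuntime` module is importable:

```python
import numpy as np
from PyRuntime import OMExecutionSession  # needs onnx-mlir's libs on the Python path

sess = OMExecutionSession("model.so")          # assumed pre-compiled model
np_args = [np.ones((1, 3), dtype=np.float32)]  # forward() passes a list of numpy arrays
outputs = sess.run(np_args)                    # no output-names argument here
print([o.shape for o in outputs])
```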