fix: update ModelScopeEstimator image #7

Merged · 3 commits · Jan 2, 2024
3 changes: 3 additions & 0 deletions pai/huggingface/estimator.py
@@ -196,6 +196,8 @@ def __init__(
session=session,
**kwargs,
)
# Check image_uri and transformers_version
self.training_image_uri()

def _validate_image_uri(self, image_uri: str, transformers_version: str) -> None:
"""Check if image_uri or transformers_version arguments are specified."""
@@ -272,6 +274,7 @@ def _get_supported_tf_versions_for_training(self) -> List[str]:
if label["Value"] not in res:
res.append(label["Value"])

res.sort(key=lambda x: to_semantic_version(x))
return res

def _get_latest_tf_version_for_training(self) -> str:
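The newly added `res.sort(key=lambda x: to_semantic_version(x))` is what makes picking the "latest" version reliable: the label values are plain strings, and a lexicographic sort misorders them (e.g. "4.9.0" sorts after "4.33.1"). A minimal sketch of the idea, using a hypothetical stand-in for `pai.common.utils.to_semantic_version` (the real helper may parse versions differently):

```python
# Hypothetical stand-in for pai.common.utils.to_semantic_version; any key that
# compares version components numerically (rather than lexically) works here.
def to_semantic_version(version: str):
    parts = []
    for piece in version.split("."):
        digits = "".join(ch for ch in piece if ch.isdigit())
        parts.append(int(digits) if digits else 0)
    return tuple(parts)  # "4.33.1" -> (4, 33, 1)

versions = ["4.9.0", "4.21.0", "4.33.1"]
print(sorted(versions))                           # ['4.21.0', '4.33.1', '4.9.0'] -- wrong "latest"
print(sorted(versions, key=to_semantic_version))  # ['4.9.0', '4.21.0', '4.33.1'] -- correct
```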
34 changes: 19 additions & 15 deletions pai/huggingface/model.py
@@ -13,7 +13,6 @@
# limitations under the License.

import logging
import re
from typing import Any, Dict, List, Optional, Union

from ..common.utils import to_semantic_version
@@ -29,10 +28,6 @@

logger = logging.getLogger(__name__)

_PAI_HF_IMAGE_TAG_PATTERN_INFERENCE = re.compile(
r"huggingface-inference:transformers-(\d.+)-(gpu|cpu)"
)


class HuggingFaceModel(ModelBase):
"""A HuggingFace ``Model`` that can be deployed in PAI to create a prediction service.
@@ -180,6 +175,8 @@ def __init__(
model_data=self.model_data,
session=session or get_default_session(),
)
# Check image_uri and transformers_version
self.serving_image_uri()

def _validate_args(self, image_uri: str, transformers_version: str) -> None:
"""Check if image_uri or transformers_version arguments are specified."""
@@ -189,7 +186,7 @@ def _validate_args(self, image_uri: str, transformers_version: str) -> None:
"Specify either transformers_version or image_uri."
)

def serving_image_uri(self, instance_type: str) -> str:
def serving_image_uri(self) -> str:
"""Return the Docker image to use for serving.

The :meth:`pai.huggingface.model.HuggingFaceModel.deploy` method, that does the
@@ -212,10 +209,13 @@ def serving_image_uri(self, instance_type: str) -> str:
# Filter images by Transformers version
if self.transformers_version == "latest":
latest_version = self._get_latest_tf_version_for_inference()
name = f"huggingface-inference:transformers-{latest_version}-"
labels.append(ImageLabel.framework_version("Transformers", latest_version))
else:
name = f"huggingface-inference:transformers-{self.transformers_version}-"
labels.append(
ImageLabel.framework_version("Transformers", self.transformers_version)
)

name = "huggingface-inference:"
resp = self.session.image_api.list(
name=name,
labels=labels,
@@ -241,21 +241,25 @@ def _get_supported_tf_versions_for_inference(self) -> List[str]:
ImageLabel.EAS_LABEL,
ImageLabel.PROVIDER_PAI_LABEL,
ImageLabel.DEVICE_TYPE_GPU,
ImageLabel.framework_version("Transformers", "*"),
]
name = "huggingface-inference:transformers-"
name = "huggingface-inference:"
list_images = self.session.image_api.list(
name=name,
labels=labels,
verbose=True,
workspace_id=0,
).items

res = []
for image in list_images:
tag_match = _PAI_HF_IMAGE_TAG_PATTERN_INFERENCE.match(image["Name"])
transformer_version, _ = tag_match.groups()
if transformer_version not in res:
res.append(transformer_version)

for label in image["Labels"]:
if (
label["Key"] == "system.framework.Transformers"
and label["Value"] not in res
):
res.append(label["Value"])
res.sort(key=lambda x: to_semantic_version(x))
return res

def _get_latest_tf_version_for_inference(self) -> str:
@@ -327,7 +331,7 @@ def deploy(
:class:`pai.predictor.Predictor` : A PAI ``Predictor`` instance used for
making prediction to the prediction service.
"""
image_uri = self.serving_image_uri(instance_type=instance_type)
image_uri = self.serving_image_uri()
self.inference_spec = container_serving_spec(
command=self.command,
image_uri=image_uri,
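Because `serving_image_uri` now resolves the image from labels rather than from the tag, it no longer needs an `instance_type` argument. A rough usage sketch of what that means for callers, assuming a default PAI session is configured (constructor arguments here are illustrative, not exhaustive):

```python
from pai.huggingface.model import HuggingFaceModel

# Hypothetical model; the OSS path and command are placeholders.
model = HuggingFaceModel(
    transformers_version="latest",              # or a pinned version such as "4.33.1"
    command="python serve.py",
    model_data="oss://my-bucket/models/bert/",
)

# Previously: model.serving_image_uri(instance_type=...)
# Now the image is selected via the "Transformers" framework-version label alone.
print(model.serving_image_uri())
```

Note that `__init__` now calls `serving_image_uri()` itself, so an unresolvable `transformers_version`/`image_uri` combination surfaces at construction time rather than at `deploy()`.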
11 changes: 5 additions & 6 deletions pai/modelscope/estimator.py
@@ -196,6 +196,8 @@ def __init__(
session=session,
**kwargs,
)
# Check image_uri and modelscope_version
self.training_image_uri()

def _validate_image_uri(self, image_uri: str, modelscope_version: str) -> None:
"""Check if image_uri or modelscope_version arguments are specified."""
@@ -219,10 +221,8 @@ def training_image_uri(self) -> str:

labels = [
ImageLabel.OFFICIAL_LABEL,
ImageLabel.DLC_LABEL,
ImageLabel.PROVIDER_COMMUNITY_LABEL,
ImageLabel.DSW_LABEL,
ImageLabel.DEVICE_TYPE_GPU,
ImageLabel.framework_version("PyTorch", "*"),
]

# Filter images by ModelScope version
@@ -255,10 +255,8 @@ def _get_supported_ms_versions_for_training(self) -> List[str]:
label_keys = "system.framework.ModelScope"
label_filter = [
ImageLabel.OFFICIAL_LABEL,
ImageLabel.DLC_LABEL,
ImageLabel.PROVIDER_COMMUNITY_LABEL,
ImageLabel.DSW_LABEL,
ImageLabel.DEVICE_TYPE_GPU,
ImageLabel.framework_version("PyTorch", "*"),
ImageLabel.framework_version("ModelScope", "*"),
]
list_image_labels = self.session.image_api.list_labels(
@@ -272,6 +270,7 @@ def _get_supported_ms_versions_for_training(self) -> List[str]:
if label["Value"] not in res:
res.append(label["Value"])

res.sort(key=lambda x: to_semantic_version(x))
return res

def _get_latest_ms_version_for_training(self) -> str:
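The same fail-fast pattern applies here: `training_image_uri()` is invoked from `__init__`, so a `modelscope_version` that does not match any community image is reported when the estimator is created rather than when `fit()` is called. A sketch under that assumption (argument names are illustrative and a default PAI session is assumed):

```python
from pai.modelscope.estimator import ModelScopeEstimator

# Hypothetical estimator configuration; command and instance_type are placeholders.
est = ModelScopeEstimator(
    command="python train.py",
    modelscope_version="latest",        # resolved against ModelScope image labels
    instance_type="ecs.gn6i-c4g1.xlarge",
)

# The image was already resolved once during __init__; calling again repeats the lookup.
print(est.training_image_uri())
```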
30 changes: 18 additions & 12 deletions pai/modelscope/model.py
@@ -13,7 +13,6 @@
# limitations under the License.

import logging
import re
from typing import Any, Dict, List, Optional, Union

from ..api.image import ImageLabel
@@ -29,8 +28,6 @@

logger = logging.getLogger(__name__)

_PAI_MS_IMAGE_TAG_PATTERN_INFERENCE = re.compile(r"modelscope-inference:(\d.+)")


class ModelScopeModel(ModelBase):
"""A ModelScope ``Model`` that can be deployed in PAI to create a prediction service.
@@ -175,6 +172,8 @@ def __init__(
model_data=self.model_data,
session=session,
)
# Check image_uri and modelscope_version
self.serving_image_uri()

def _validate_args(self, image_uri: str, modelscope_version: str) -> None:
"""Check if image_uri or modelscope_version arguments are specified."""
@@ -184,7 +183,7 @@ def _validate_args(self, image_uri: str, modelscope_version: str) -> None:
"Specify either modelscope_version or image_uri."
)

def serving_image_uri(self, instance_type: str) -> str:
def serving_image_uri(self) -> str:
"""Return the Docker image to use for serving.

The :meth:`pai.modelscope.model.ModelScopeModel.deploy` method, that does the
@@ -208,10 +207,13 @@ def serving_image_uri(self, instance_type: str) -> str:
# Filter images by ModelScope version
if self.modelscope_version == "latest":
latest_version = self._get_latest_ms_version_for_inference()
name = f"modelscope-inference:{latest_version}"
labels.append(ImageLabel.framework_version("ModelScope", latest_version))
else:
name = f"modelscope-inference:{self.modelscope_version}"
labels.append(
ImageLabel.framework_version("ModelScope", self.modelscope_version)
)

name = "modelscope-inference:"
list_images = self.session.image_api.list(
name=name,
labels=labels,
@@ -236,21 +238,25 @@ def _get_supported_ms_versions_for_inference(self) -> List[str]:
ImageLabel.EAS_LABEL,
ImageLabel.PROVIDER_PAI_LABEL,
ImageLabel.DEVICE_TYPE_GPU,
ImageLabel.framework_version("ModelScope", "*"),
]
name = "modelscope-inference:"
list_images = self.session.image_api.list(
name=name,
labels=labels,
verbose=True,
workspace_id=0,
).items

res = []
for image in list_images:
tag_match = _PAI_MS_IMAGE_TAG_PATTERN_INFERENCE.match(image["Name"])
(modelscope_version,) = tag_match.groups()
if modelscope_version not in res:
res.append(modelscope_version)

for label in image["Labels"]:
if (
label["Key"] == "system.framework.ModelScope"
and label["Value"] not in res
):
res.append(label["Value"])
res.sort(key=lambda x: to_semantic_version(x))
return res

def _get_latest_ms_version_for_inference(self) -> str:
@@ -322,7 +328,7 @@ def deploy(
:class:`pai.predictor.Predictor` : A PAI ``Predictor`` instance used for
making prediction to the prediction service.
"""
image_uri = self.serving_image_uri(instance_type=instance_type)
image_uri = self.serving_image_uri()
self.inference_spec = container_serving_spec(
command=self.command,
image_uri=image_uri,
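For reference, the shape of the label-based rewrite in `_get_supported_ms_versions_for_inference`: supported versions are now read from each image's `system.framework.ModelScope` label instead of being parsed out of the image tag with a regex. A condensed sketch, where `list_images` stands for the `session.image_api.list(...).items` result shown above:

```python
# Before: parse the version out of the image tag.
#   m = re.compile(r"modelscope-inference:(\d.+)").match(image["Name"])
#   version = m.groups()[0]

# After: collect the framework-version label attached to each image.
res = []
for image in list_images:
    for label in image["Labels"]:
        if label["Key"] == "system.framework.ModelScope" and label["Value"] not in res:
            res.append(label["Value"])
res.sort(key=to_semantic_version)  # same semantic sort as in the estimator change
```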