diff --git a/pai/api/training_job.py b/pai/api/training_job.py index f8648fa..25697c8 100644 --- a/pai/api/training_job.py +++ b/pai/api/training_job.py @@ -20,12 +20,14 @@ CreateTrainingJobRequest, CreateTrainingJobRequestComputeResource, CreateTrainingJobRequestComputeResourceInstanceSpec, + CreateTrainingJobRequestComputeResourceSpotSpec, CreateTrainingJobRequestExperimentConfig, CreateTrainingJobRequestHyperParameters, CreateTrainingJobRequestInputChannels, CreateTrainingJobRequestLabels, CreateTrainingJobRequestOutputChannels, CreateTrainingJobRequestScheduler, + CreateTrainingJobRequestSettings, CreateTrainingJobRequestUserVpc, CreateTrainingJobResponseBody, GetTrainingJobRequest, @@ -86,8 +88,10 @@ def create( instance_type, instance_count, job_name, + spot_spec: Optional[Dict[str, Any]] = None, instance_spec: Optional[Dict[str, str]] = None, resource_id: Optional[str] = None, + resource_type: Optional[str] = None, hyperparameters: Optional[Dict[str, Any]] = None, input_channels: Optional[List[Dict[str, Any]]] = None, output_channels: Optional[List[Dict[str, Any]]] = None, @@ -102,6 +106,7 @@ def create( algorithm_spec: Optional[Dict[str, Any]] = None, user_vpc_config: Optional[Dict[str, Any]] = None, experiment_config: Optional[Dict[str, Any]] = None, + settings: Optional[Dict[str, Any]] = None, ) -> str: """Create a TrainingJob.""" if algorithm_spec and ( @@ -126,9 +131,16 @@ def create( for ch in output_channels ] if instance_type: + spot_spec = ( + CreateTrainingJobRequestComputeResourceSpotSpec().from_map(spot_spec) + if spot_spec + else None + ) compute_resource = CreateTrainingJobRequestComputeResource( ecs_count=instance_count, ecs_spec=instance_type, + use_spot_instance=bool(spot_spec), + spot_spec=spot_spec, ) elif instance_spec: compute_resource = CreateTrainingJobRequestComputeResource( @@ -169,6 +181,7 @@ def create( compute_resource=compute_resource, hyper_parameters=hyper_parameters, input_channels=input_channels, + 
resource_type=resource_type, environments=environments, python_requirements=requirements, labels=labels, @@ -181,6 +194,11 @@ def create( experiment_config=CreateTrainingJobRequestExperimentConfig().from_map( experiment_config ), + settings=( + CreateTrainingJobRequestSettings().from_map(settings) + if settings + else None + ), ) resp: CreateTrainingJobResponseBody = self._do_request( diff --git a/pai/estimator.py b/pai/estimator.py index a3ee149..58a539d 100644 --- a/pai/estimator.py +++ b/pai/estimator.py @@ -35,6 +35,8 @@ DEFAULT_OUTPUT_MODEL_CHANNEL_NAME, DEFAULT_TENSORBOARD_CHANNEL_NAME, ExperimentConfig, + ResourceType, + SpotSpec, UserVpcConfig, ) from .model import InferenceSpec, Model, ResourceConfig @@ -187,11 +189,14 @@ def __init__( environments: Optional[Dict[str, str]] = None, requirements: Optional[List[str]] = None, instance_type: Optional[str] = None, + spot_spec: Optional[SpotSpec] = None, instance_spec: Optional[Dict] = None, resource_id: Optional[Dict] = None, + resource_type: Optional[Union[str, ResourceType]] = None, instance_count: Optional[int] = None, user_vpc_config: Optional[UserVpcConfig] = None, experiment_config: Optional[ExperimentConfig] = None, + settings: Optional[Dict[str, Any]] = None, labels: Optional[Dict[str, str]] = None, session: Optional[Session] = None, ): @@ -252,12 +257,18 @@ def __init__( 'package' or 'package==version'. This is similar to the contents of a requirements.txt file used in Python projects. If requirements.txt is provided in user code directory, requirements will override the conflict dependencies directly. + resource_type (str, optional): The resource type used to run the training job. + By default, general computing resource is used. If the resource_type is + 'Lingjun', Lingjun computing resource is used. instance_type (str, optional): The machine instance type used to run the training job. 
To view the supported machine instance types, please refer to the document: https://help.aliyun.com/document_detail/171758.htm#section-55y-4tq-84y. If the instance_type is "local", the training job is executed locally using docker. + spot_spec (:class:`pai.job.SpotSpec`, optional): The specification of the spot + instance used to run the training job. If provided, the training job will + use the spot instance to run the training job. instance_count (int): The number of machines used to run the training job. user_vpc_config (:class:`pai.estimator.UserVpcConfig`, optional): The VPC configuration used to enable the training job instance to connect to the @@ -270,6 +281,8 @@ def __init__( training job and the experiment. If provided, the training job will belong to the specified experiment, in which case the training job will use artifact_uri of experiment as default output path. Default to None. + settings (dict, optional): A dictionary that represents the additional settings + for job, such as AIMaster configurations. labels (Dict[str, str], optional): A dictionary that maps label names to their values. This optional field allows you to provide a set of labels that will be applied to the training job. 
@@ -287,11 +300,14 @@ def __init__( instance_type=instance_type, instance_count=instance_count, resource_id=resource_id, + resource_type=resource_type, + spot_spec=spot_spec, instance_spec=instance_spec, user_vpc_config=user_vpc_config, max_run_time=max_run_time, environments=environments, requirements=requirements, + settings=settings, labels=labels, ) diff --git a/pai/job/__init__.py b/pai/job/__init__.py index 6d2769c..239b3ff 100644 --- a/pai/job/__init__.py +++ b/pai/job/__init__.py @@ -22,6 +22,9 @@ InstanceSpec, ModelRecipeSpec, OssLocation, + ResourceType, + SpotSpec, + SpotStrategy, TrainingJob, TrainingJobStatus, UriInput, @@ -45,4 +48,7 @@ "ExperimentConfig", "InstanceSpec", "UriInput", + "SpotSpec", + "ResourceType", + "SpotStrategy", ] diff --git a/pai/job/_training_job.py b/pai/job/_training_job.py index 8cfc57a..6af5e60 100644 --- a/pai/job/_training_job.py +++ b/pai/job/_training_job.py @@ -17,6 +17,7 @@ import time import typing from concurrent.futures import ThreadPoolExecutor +from enum import Enum from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel, ConfigDict, Field @@ -55,6 +56,19 @@ def as_oss_dir_uri(uri: str): DEFAULT_TENSORBOARD_CHANNEL_NAME = "tensorboard" +class SpotStrategy(str, Enum): + SpotWithPriceLimit = "SpotWithPriceLimit" + SpotAsPriceGo = "SpotAsPriceGo" + + def __repr__(self): + return self.value + + +class ResourceType(str, Enum): + Lingjun = "Lingjun" + General = "General" + + class BaseAPIModel(BaseModel): model_config = ConfigDict( @@ -275,11 +289,14 @@ class AlgorithmSpec(BaseAPIModel): ) hyperparameter_definitions: List[HyperParameterDefinition] = Field( default_factory=list, - alias="HyperParameter", + alias="HyperParameters", description="Hyperparameter definitions.", ) job_type: str = Field(default="PyTorchJob") code_dir: Optional[CodeDir] = Field(None, description="Source code location.") + customization: Optional[Dict[str, Any]] = Field( + None, description="Whether the algorithm 
supports customize code." + ) class ModelRecipeSpec(BaseAPIModel): @@ -300,6 +317,19 @@ class ModelRecipeSpec(BaseAPIModel): requirements: Optional[List[str]] = None +class SpotSpec(BaseAPIModel): + spot_strategy: SpotStrategy = Field( + ..., + description="Spot instance strategy, support 'SpotWithPriceLimit', 'SpotAsPriceGo'", + ) + spot_discount_limit: Optional[float] = Field( + None, + description="Spot instance discount limit, maximum 2 decimal places, " + "required when spot_strategy is 'SpotWithPriceLimit'." + "For example, 0.5 means 50% off the original price.", + ) + + class TrainingJob(BaseAPIModel): """TrainingJob represents a training job in the PAI service.""" @@ -542,23 +572,29 @@ def __init__( instance_spec: Optional[Dict] = None, instance_count: Optional[int] = None, resource_id: Optional[Dict] = None, + resource_type: Optional[Union[str, ResourceType]] = None, + spot_spec: Optional[SpotSpec] = None, environments: Optional[Dict] = None, requirements: Optional[List[str]] = None, labels: Optional[Dict[str, str]] = None, + settings: Optional[Dict[str, Any]] = None, ): self.session = get_default_session() self._training_jobs = [] self.base_job_name = base_job_name or type(self).__name__.lower() self.output_path = output_path self.user_vpc_config = user_vpc_config + self.spot_spec = spot_spec self.experiment_config = experiment_config self.max_run_time = max_run_time self.instance_type = instance_type self.instance_spec = instance_spec self.instance_count = instance_count or 1 self.resource_id = resource_id + self.resource_type = ResourceType(resource_type) if resource_type else None self.environments = environments self.requirements = requirements + self.settings = settings self.labels = labels def wait(self, interval: int = 5, show_logs: bool = True, all_jobs: bool = False): @@ -704,6 +740,7 @@ def build_outputs( return [item.model_dump() for item in res] + # TODO: get arguments, such as VPCConfig, instance_type etc, from self instance. 
def _submit( self, job_name: str, @@ -728,6 +765,20 @@ def _submit( show_logs: bool = False, ): session = get_default_session() + + if not self.resource_type or self.resource_type == ResourceType.General: + resource_type = None + else: + resource_type = self.resource_type.value + + if self.spot_spec: + spot_spec = { + "SpotStrategy": self.spot_spec.spot_strategy.value, + } + if self.spot_spec.spot_discount_limit: + spot_spec["SpotDiscountLimit"] = self.spot_spec.spot_discount_limit + else: + spot_spec = None training_job_id = session.training_job_api.create( instance_count=instance_count, instance_spec=instance_spec.model_dump() if instance_spec else None, @@ -738,9 +789,11 @@ def _submit( if experiment_config and isinstance(experiment_config, ExperimentConfig) else experiment_config ), + spot_spec=spot_spec, algorithm_version=algorithm_version, instance_type=instance_type, resource_id=resource_id, + resource_type=resource_type, job_name=job_name, hyperparameters=hyperparameters, max_running_in_seconds=max_run_time, @@ -751,6 +804,7 @@ def _submit( user_vpc_config=user_vpc_config, labels=labels, environments=environments, + settings=self.settings, ) training_job = TrainingJob.get(training_job_id) self._training_jobs.append(training_job) diff --git a/pai/libs/alibabacloud_paistudio20220112/client.py b/pai/libs/alibabacloud_paistudio20220112/client.py index b89d438..63832fb 100644 --- a/pai/libs/alibabacloud_paistudio20220112/client.py +++ b/pai/libs/alibabacloud_paistudio20220112/client.py @@ -18,7 +18,7 @@ class Client(OpenApiClient): *\ """ def __init__( - self, + self, config: open_api_models.Config, ): super().__init__(config) @@ -60,102 +60,6 @@ def get_endpoint( return endpoint_map.get(region_id) return EndpointUtilClient.get_endpoint_rules(product_id, region_id, endpoint_rule, network, suffix) - def build_llmsnapshot_with_options( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.BuildLLMSnapshotRequest, - headers: Dict[str, 
str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.BuildLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.description): - body['Description'] = request.description - if not UtilClient.is_unset(request.display_name): - body['DisplayName'] = request.display_name - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.workload): - body['Workload'] = request.workload - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='BuildLLMSnapshot', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}/build', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.BuildLLMSnapshotResponse(), - self.call_api(params, req, runtime) - ) - - async def build_llmsnapshot_with_options_async( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.BuildLLMSnapshotRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.BuildLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.description): - body['Description'] = request.description - if not UtilClient.is_unset(request.display_name): - body['DisplayName'] = request.display_name - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.workload): - body['Workload'] = request.workload - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='BuildLLMSnapshot', - version='2022-01-12', - 
protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}/build', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.BuildLLMSnapshotResponse(), - await self.call_api_async(params, req, runtime) - ) - - def build_llmsnapshot( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.BuildLLMSnapshotRequest, - ) -> pai_studio_20220112_models.BuildLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.build_llmsnapshot_with_options(project_id, snapshot_id, request, headers, runtime) - - async def build_llmsnapshot_async( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.BuildLLMSnapshotRequest, - ) -> pai_studio_20220112_models.BuildLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.build_llmsnapshot_with_options_async(project_id, snapshot_id, request, headers, runtime) - def check_instance_web_terminal_with_options( self, training_job_id: str, @@ -164,6 +68,14 @@ def check_instance_web_terminal_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CheckInstanceWebTerminalResponse: + """ + @summary 检查WebTerminal + + @param request: CheckInstanceWebTerminalRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CheckInstanceWebTerminalResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.check_info): @@ -196,6 +108,14 @@ async def check_instance_web_terminal_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CheckInstanceWebTerminalResponse: + """ + @summary 检查WebTerminal + + @param request: 
CheckInstanceWebTerminalRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CheckInstanceWebTerminalResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.check_info): @@ -226,6 +146,12 @@ def check_instance_web_terminal( instance_id: str, request: pai_studio_20220112_models.CheckInstanceWebTerminalRequest, ) -> pai_studio_20220112_models.CheckInstanceWebTerminalResponse: + """ + @summary 检查WebTerminal + + @param request: CheckInstanceWebTerminalRequest + @return: CheckInstanceWebTerminalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.check_instance_web_terminal_with_options(training_job_id, instance_id, request, headers, runtime) @@ -236,6 +162,12 @@ async def check_instance_web_terminal_async( instance_id: str, request: pai_studio_20220112_models.CheckInstanceWebTerminalRequest, ) -> pai_studio_20220112_models.CheckInstanceWebTerminalResponse: + """ + @summary 检查WebTerminal + + @param request: CheckInstanceWebTerminalRequest + @return: CheckInstanceWebTerminalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.check_instance_web_terminal_with_options_async(training_job_id, instance_id, request, headers, runtime) @@ -245,6 +177,13 @@ def create_ai4ddefault_bucket_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAI4DDefaultBucketResponse: + """ + @summary 创建AI4D模型桶 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAI4DDefaultBucketResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -269,6 +208,13 @@ async def create_ai4ddefault_bucket_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAI4DDefaultBucketResponse: + """ + @summary 创建AI4D模型桶 + + @param headers: map + @param runtime: runtime options for 
this request RuntimeOptions + @return: CreateAI4DDefaultBucketResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -289,11 +235,21 @@ async def create_ai4ddefault_bucket_with_options_async( ) def create_ai4ddefault_bucket(self) -> pai_studio_20220112_models.CreateAI4DDefaultBucketResponse: + """ + @summary 创建AI4D模型桶 + + @return: CreateAI4DDefaultBucketResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_ai4ddefault_bucket_with_options(headers, runtime) async def create_ai4ddefault_bucket_async(self) -> pai_studio_20220112_models.CreateAI4DDefaultBucketResponse: + """ + @summary 创建AI4D模型桶 + + @return: CreateAI4DDefaultBucketResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_ai4ddefault_bucket_with_options_async(headers, runtime) @@ -304,6 +260,14 @@ def create_ai4dserivce_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAI4DSerivceResponse: + """ + @summary 创建AI4D服务 + + @param request: CreateAI4DSerivceRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAI4DSerivceResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.inference_spec): @@ -338,6 +302,14 @@ async def create_ai4dserivce_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAI4DSerivceResponse: + """ + @summary 创建AI4D服务 + + @param request: CreateAI4DSerivceRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAI4DSerivceResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.inference_spec): @@ -370,6 +342,12 @@ def create_ai4dserivce( self, request: pai_studio_20220112_models.CreateAI4DSerivceRequest, ) -> pai_studio_20220112_models.CreateAI4DSerivceResponse: + """ + @summary 创建AI4D服务 
+ + @param request: CreateAI4DSerivceRequest + @return: CreateAI4DSerivceResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_ai4dserivce_with_options(request, headers, runtime) @@ -378,6 +356,12 @@ async def create_ai4dserivce_async( self, request: pai_studio_20220112_models.CreateAI4DSerivceRequest, ) -> pai_studio_20220112_models.CreateAI4DSerivceResponse: + """ + @summary 创建AI4D服务 + + @param request: CreateAI4DSerivceRequest + @return: CreateAI4DSerivceResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_ai4dserivce_with_options_async(request, headers, runtime) @@ -388,6 +372,14 @@ def create_algorithm_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAlgorithmResponse: + """ + @summary 创建新的算法 + + @param request: CreateAlgorithmRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAlgorithmResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_description): @@ -424,6 +416,14 @@ async def create_algorithm_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAlgorithmResponse: + """ + @summary 创建新的算法 + + @param request: CreateAlgorithmRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAlgorithmResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_description): @@ -458,6 +458,12 @@ def create_algorithm( self, request: pai_studio_20220112_models.CreateAlgorithmRequest, ) -> pai_studio_20220112_models.CreateAlgorithmResponse: + """ + @summary 创建新的算法 + + @param request: CreateAlgorithmRequest + @return: CreateAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_algorithm_with_options(request, headers, 
runtime) @@ -466,6 +472,12 @@ async def create_algorithm_async( self, request: pai_studio_20220112_models.CreateAlgorithmRequest, ) -> pai_studio_20220112_models.CreateAlgorithmResponse: + """ + @summary 创建新的算法 + + @param request: CreateAlgorithmRequest + @return: CreateAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_algorithm_with_options_async(request, headers, runtime) @@ -478,6 +490,14 @@ def create_algorithm_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param tmp_req: CreateAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAlgorithmVersionResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.CreateAlgorithmVersionShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -514,6 +534,14 @@ async def create_algorithm_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param tmp_req: CreateAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateAlgorithmVersionResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.CreateAlgorithmVersionShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -548,6 +576,12 @@ def create_algorithm_version( algorithm_version: str, request: pai_studio_20220112_models.CreateAlgorithmVersionRequest, ) -> pai_studio_20220112_models.CreateAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param request: CreateAlgorithmVersionRequest + @return: CreateAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return 
self.create_algorithm_version_with_options(algorithm_id, algorithm_version, request, headers, runtime) @@ -558,6 +592,12 @@ async def create_algorithm_version_async( algorithm_version: str, request: pai_studio_20220112_models.CreateAlgorithmVersionRequest, ) -> pai_studio_20220112_models.CreateAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param request: CreateAlgorithmVersionRequest + @return: CreateAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_algorithm_version_with_options_async(algorithm_id, algorithm_version, request, headers, runtime) @@ -568,6 +608,14 @@ def create_component_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateComponentResponse: + """ + @summary 创建组件 + + @param request: CreateComponentRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateComponentResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -606,6 +654,14 @@ async def create_component_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateComponentResponse: + """ + @summary 创建组件 + + @param request: CreateComponentRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateComponentResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -642,6 +698,12 @@ def create_component( self, request: pai_studio_20220112_models.CreateComponentRequest, ) -> pai_studio_20220112_models.CreateComponentResponse: + """ + @summary 创建组件 + + @param request: CreateComponentRequest + @return: CreateComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_component_with_options(request, headers, runtime) @@ -650,6 +712,12 @@ async def 
create_component_async( self, request: pai_studio_20220112_models.CreateComponentRequest, ) -> pai_studio_20220112_models.CreateComponentResponse: + """ + @summary 创建组件 + + @param request: CreateComponentRequest + @return: CreateComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_component_with_options_async(request, headers, runtime) @@ -661,6 +729,14 @@ def create_component_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateComponentVersionResponse: + """ + @summary 创建组件版本 + + @param request: CreateComponentVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateComponentVersionResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.config_dir): @@ -700,6 +776,14 @@ async def create_component_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateComponentVersionResponse: + """ + @summary 创建组件版本 + + @param request: CreateComponentVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateComponentVersionResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.config_dir): @@ -737,6 +821,12 @@ def create_component_version( component_id: str, request: pai_studio_20220112_models.CreateComponentVersionRequest, ) -> pai_studio_20220112_models.CreateComponentVersionResponse: + """ + @summary 创建组件版本 + + @param request: CreateComponentVersionRequest + @return: CreateComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_component_version_with_options(component_id, request, headers, runtime) @@ -746,6 +836,12 @@ async def create_component_version_async( component_id: str, request: pai_studio_20220112_models.CreateComponentVersionRequest, 
) -> pai_studio_20220112_models.CreateComponentVersionResponse: + """ + @summary 创建组件版本 + + @param request: CreateComponentVersionRequest + @return: CreateComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_component_version_with_options_async(component_id, request, headers, runtime) @@ -757,6 +853,13 @@ def create_instance_web_terminal_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateInstanceWebTerminalResponse: + """ + @summary 创建WebTerminal + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateInstanceWebTerminalResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -783,6 +886,13 @@ async def create_instance_web_terminal_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateInstanceWebTerminalResponse: + """ + @summary 创建WebTerminal + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateInstanceWebTerminalResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -807,6 +917,11 @@ def create_instance_web_terminal( training_job_id: str, instance_id: str, ) -> pai_studio_20220112_models.CreateInstanceWebTerminalResponse: + """ + @summary 创建WebTerminal + + @return: CreateInstanceWebTerminalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_instance_web_terminal_with_options(training_job_id, instance_id, headers, runtime) @@ -816,272 +931,29 @@ async def create_instance_web_terminal_async( training_job_id: str, instance_id: str, ) -> pai_studio_20220112_models.CreateInstanceWebTerminalResponse: + """ + @summary 创建WebTerminal + + @return: CreateInstanceWebTerminalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_instance_web_terminal_with_options_async(training_job_id, 
instance_id, headers, runtime) - def create_llmproject_with_options( - self, - request: pai_studio_20220112_models.CreateLLMProjectRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMProjectResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.project_description): - body['ProjectDescription'] = request.project_description - if not UtilClient.is_unset(request.project_name): - body['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.project_type): - body['ProjectType'] = request.project_type - if not UtilClient.is_unset(request.root_path): - body['RootPath'] = request.root_path - if not UtilClient.is_unset(request.runtime): - body['Runtime'] = request.runtime - if not UtilClient.is_unset(request.workspace_id): - body['WorkspaceId'] = request.workspace_id - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMProject', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMProjectResponse(), - self.call_api(params, req, runtime) - ) - - async def create_llmproject_with_options_async( - self, - request: pai_studio_20220112_models.CreateLLMProjectRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMProjectResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.project_description): - body['ProjectDescription'] = request.project_description - if not 
UtilClient.is_unset(request.project_name): - body['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.project_type): - body['ProjectType'] = request.project_type - if not UtilClient.is_unset(request.root_path): - body['RootPath'] = request.root_path - if not UtilClient.is_unset(request.runtime): - body['Runtime'] = request.runtime - if not UtilClient.is_unset(request.workspace_id): - body['WorkspaceId'] = request.workspace_id - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMProject', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMProjectResponse(), - await self.call_api_async(params, req, runtime) - ) - - def create_llmproject( - self, - request: pai_studio_20220112_models.CreateLLMProjectRequest, - ) -> pai_studio_20220112_models.CreateLLMProjectResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.create_llmproject_with_options(request, headers, runtime) - - async def create_llmproject_async( - self, - request: pai_studio_20220112_models.CreateLLMProjectRequest, - ) -> pai_studio_20220112_models.CreateLLMProjectResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.create_llmproject_with_options_async(request, headers, runtime) - - def create_llmservice_identity_role_with_options( - self, - request: pai_studio_20220112_models.CreateLLMServiceIdentityRoleRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.role_name): - body['RoleName'] = request.role_name - req = open_api_models.OpenApiRequest( - 
headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMServiceIdentityRole', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/serviceidentityroles', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse(), - self.call_api(params, req, runtime) - ) - - async def create_llmservice_identity_role_with_options_async( - self, - request: pai_studio_20220112_models.CreateLLMServiceIdentityRoleRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.role_name): - body['RoleName'] = request.role_name - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMServiceIdentityRole', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/serviceidentityroles', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse(), - await self.call_api_async(params, req, runtime) - ) - - def create_llmservice_identity_role( - self, - request: pai_studio_20220112_models.CreateLLMServiceIdentityRoleRequest, - ) -> pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.create_llmservice_identity_role_with_options(request, headers, runtime) - - async def create_llmservice_identity_role_async( - self, - request: pai_studio_20220112_models.CreateLLMServiceIdentityRoleRequest, - ) -> pai_studio_20220112_models.CreateLLMServiceIdentityRoleResponse: - runtime = 
util_models.RuntimeOptions() - headers = {} - return await self.create_llmservice_identity_role_with_options_async(request, headers, runtime) - - def create_llmsnapshot_with_options( - self, - project_id: str, - request: pai_studio_20220112_models.CreateLLMSnapshotRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.storage): - body['Storage'] = request.storage - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMSnapshot', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMSnapshotResponse(), - self.call_api(params, req, runtime) - ) - - async def create_llmsnapshot_with_options_async( - self, - project_id: str, - request: pai_studio_20220112_models.CreateLLMSnapshotRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.CreateLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.storage): - body['Storage'] = request.storage - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='CreateLLMSnapshot', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots', - method='POST', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.CreateLLMSnapshotResponse(), - await 
self.call_api_async(params, req, runtime) - ) - - def create_llmsnapshot( - self, - project_id: str, - request: pai_studio_20220112_models.CreateLLMSnapshotRequest, - ) -> pai_studio_20220112_models.CreateLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.create_llmsnapshot_with_options(project_id, request, headers, runtime) - - async def create_llmsnapshot_async( - self, - project_id: str, - request: pai_studio_20220112_models.CreateLLMSnapshotRequest, - ) -> pai_studio_20220112_models.CreateLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.create_llmsnapshot_with_options_async(project_id, request, headers, runtime) - def create_quota_with_options( self, request: pai_studio_20220112_models.CreateQuotaRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateQuotaResponse: + """ + @summary 创建Quota + + @param request: CreateQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.allocate_strategy): @@ -1130,6 +1002,14 @@ async def create_quota_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateQuotaResponse: + """ + @summary 创建Quota + + @param request: CreateQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.allocate_strategy): @@ -1176,6 +1056,12 @@ def create_quota( self, request: pai_studio_20220112_models.CreateQuotaRequest, ) -> pai_studio_20220112_models.CreateQuotaResponse: + """ + @summary 创建Quota + + @param request: CreateQuotaRequest + @return: CreateQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return 
self.create_quota_with_options(request, headers, runtime) @@ -1184,6 +1070,12 @@ async def create_quota_async( self, request: pai_studio_20220112_models.CreateQuotaRequest, ) -> pai_studio_20220112_models.CreateQuotaResponse: + """ + @summary 创建Quota + + @param request: CreateQuotaRequest + @return: CreateQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_quota_with_options_async(request, headers, runtime) @@ -1194,6 +1086,14 @@ def create_resource_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateResourceGroupResponse: + """ + @summary 创建资源组 + + @param request: CreateResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateResourceGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.computing_resource_provider): @@ -1234,6 +1134,14 @@ async def create_resource_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateResourceGroupResponse: + """ + @summary 创建资源组 + + @param request: CreateResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateResourceGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.computing_resource_provider): @@ -1272,6 +1180,12 @@ def create_resource_group( self, request: pai_studio_20220112_models.CreateResourceGroupRequest, ) -> pai_studio_20220112_models.CreateResourceGroupResponse: + """ + @summary 创建资源组 + + @param request: CreateResourceGroupRequest + @return: CreateResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_resource_group_with_options(request, headers, runtime) @@ -1280,6 +1194,12 @@ async def create_resource_group_async( self, request: 
pai_studio_20220112_models.CreateResourceGroupRequest, ) -> pai_studio_20220112_models.CreateResourceGroupResponse: + """ + @summary 创建资源组 + + @param request: CreateResourceGroupRequest + @return: CreateResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_resource_group_with_options_async(request, headers, runtime) @@ -1291,6 +1211,14 @@ def create_resource_group_machine_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateResourceGroupMachineGroupResponse: + """ + @summary 创建机器组 + + @param request: CreateResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateResourceGroupMachineGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.ecs_count): @@ -1334,6 +1262,14 @@ async def create_resource_group_machine_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateResourceGroupMachineGroupResponse: + """ + @summary 创建机器组 + + @param request: CreateResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateResourceGroupMachineGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.ecs_count): @@ -1375,6 +1311,12 @@ def create_resource_group_machine_group( resource_group_id: str, request: pai_studio_20220112_models.CreateResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.CreateResourceGroupMachineGroupResponse: + """ + @summary 创建机器组 + + @param request: CreateResourceGroupMachineGroupRequest + @return: CreateResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_resource_group_machine_group_with_options(resource_group_id, request, headers, runtime) @@ -1384,6 +1326,12 
@@ async def create_resource_group_machine_group_async( resource_group_id: str, request: pai_studio_20220112_models.CreateResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.CreateResourceGroupMachineGroupResponse: + """ + @summary 创建机器组 + + @param request: CreateResourceGroupMachineGroupRequest + @return: CreateResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_resource_group_machine_group_with_options_async(resource_group_id, request, headers, runtime) @@ -1394,6 +1342,14 @@ def create_service_identity_role_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateServiceIdentityRoleResponse: + """ + @summary 创建服务认证角色 + + @param request: CreateServiceIdentityRoleRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateServiceIdentityRoleResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.role_name): @@ -1424,6 +1380,14 @@ async def create_service_identity_role_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateServiceIdentityRoleResponse: + """ + @summary 创建服务认证角色 + + @param request: CreateServiceIdentityRoleRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateServiceIdentityRoleResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.role_name): @@ -1452,6 +1416,12 @@ def create_service_identity_role( self, request: pai_studio_20220112_models.CreateServiceIdentityRoleRequest, ) -> pai_studio_20220112_models.CreateServiceIdentityRoleResponse: + """ + @summary 创建服务认证角色 + + @param request: CreateServiceIdentityRoleRequest + @return: CreateServiceIdentityRoleResponse + """ runtime = util_models.RuntimeOptions() headers = {} return 
self.create_service_identity_role_with_options(request, headers, runtime) @@ -1460,6 +1430,12 @@ async def create_service_identity_role_async( self, request: pai_studio_20220112_models.CreateServiceIdentityRoleRequest, ) -> pai_studio_20220112_models.CreateServiceIdentityRoleResponse: + """ + @summary 创建服务认证角色 + + @param request: CreateServiceIdentityRoleRequest + @return: CreateServiceIdentityRoleResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_service_identity_role_with_options_async(request, headers, runtime) @@ -1470,6 +1446,14 @@ def create_training_job_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateTrainingJobResponse: + """ + @summary 创建TrainingJob + + @param request: CreateTrainingJobRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateTrainingJobResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_name): @@ -1538,6 +1522,14 @@ async def create_training_job_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.CreateTrainingJobResponse: + """ + @summary 创建TrainingJob + + @param request: CreateTrainingJobRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: CreateTrainingJobResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_name): @@ -1604,6 +1596,12 @@ def create_training_job( self, request: pai_studio_20220112_models.CreateTrainingJobRequest, ) -> pai_studio_20220112_models.CreateTrainingJobResponse: + """ + @summary 创建TrainingJob + + @param request: CreateTrainingJobRequest + @return: CreateTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.create_training_job_with_options(request, headers, runtime) @@ -1612,6 +1610,12 @@ async def 
create_training_job_async( self, request: pai_studio_20220112_models.CreateTrainingJobRequest, ) -> pai_studio_20220112_models.CreateTrainingJobResponse: + """ + @summary 创建TrainingJob + + @param request: CreateTrainingJobRequest + @return: CreateTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.create_training_job_with_options_async(request, headers, runtime) @@ -1622,6 +1626,13 @@ def delete_algorithm_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteAlgorithmResponse: + """ + @summary 删除算法 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteAlgorithmResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1647,6 +1658,13 @@ async def delete_algorithm_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteAlgorithmResponse: + """ + @summary 删除算法 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteAlgorithmResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1670,6 +1688,11 @@ def delete_algorithm( self, algorithm_id: str, ) -> pai_studio_20220112_models.DeleteAlgorithmResponse: + """ + @summary 删除算法 + + @return: DeleteAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_algorithm_with_options(algorithm_id, headers, runtime) @@ -1678,6 +1701,11 @@ async def delete_algorithm_async( self, algorithm_id: str, ) -> pai_studio_20220112_models.DeleteAlgorithmResponse: + """ + @summary 删除算法 + + @return: DeleteAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_algorithm_with_options_async(algorithm_id, headers, runtime) @@ -1689,6 +1717,13 @@ def delete_algorithm_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> 
pai_studio_20220112_models.DeleteAlgorithmVersionResponse: + """ + @summary 删除算法版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteAlgorithmVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1715,6 +1750,13 @@ async def delete_algorithm_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteAlgorithmVersionResponse: + """ + @summary 删除算法版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteAlgorithmVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1739,6 +1781,11 @@ def delete_algorithm_version( algorithm_id: str, algorithm_version: str, ) -> pai_studio_20220112_models.DeleteAlgorithmVersionResponse: + """ + @summary 删除算法版本 + + @return: DeleteAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_algorithm_version_with_options(algorithm_id, algorithm_version, headers, runtime) @@ -1748,6 +1795,11 @@ async def delete_algorithm_version_async( algorithm_id: str, algorithm_version: str, ) -> pai_studio_20220112_models.DeleteAlgorithmVersionResponse: + """ + @summary 删除算法版本 + + @return: DeleteAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_algorithm_version_with_options_async(algorithm_id, algorithm_version, headers, runtime) @@ -1758,6 +1810,13 @@ def delete_component_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentResponse: + """ + @summary 删除组件 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1783,6 +1842,13 @@ async def delete_component_with_options_async( headers: Dict[str, str], runtime: 
util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentResponse: + """ + @summary 删除组件 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1806,6 +1872,11 @@ def delete_component( self, component_id: str, ) -> pai_studio_20220112_models.DeleteComponentResponse: + """ + @summary 删除组件 + + @return: DeleteComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_component_with_options(component_id, headers, runtime) @@ -1814,6 +1885,11 @@ async def delete_component_async( self, component_id: str, ) -> pai_studio_20220112_models.DeleteComponentResponse: + """ + @summary 删除组件 + + @return: DeleteComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_component_with_options_async(component_id, headers, runtime) @@ -1825,6 +1901,13 @@ def delete_component_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentVersionResponse: + """ + @summary 删除组件版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1851,6 +1934,13 @@ async def delete_component_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentVersionResponse: + """ + @summary 删除组件版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1875,6 +1965,11 @@ def delete_component_version( component_id: str, version: str, ) -> pai_studio_20220112_models.DeleteComponentVersionResponse: + """ + @summary 删除组件版本 + + @return: 
DeleteComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_component_version_with_options(component_id, version, headers, runtime) @@ -1884,6 +1979,11 @@ async def delete_component_version_async( component_id: str, version: str, ) -> pai_studio_20220112_models.DeleteComponentVersionResponse: + """ + @summary 删除组件版本 + + @return: DeleteComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_component_version_with_options_async(component_id, version, headers, runtime) @@ -1894,6 +1994,13 @@ def delete_component_version_snapshot_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentVersionSnapshotResponse: + """ + @summary 删除组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1919,6 +2026,13 @@ async def delete_component_version_snapshot_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteComponentVersionSnapshotResponse: + """ + @summary 删除组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -1942,6 +2056,11 @@ def delete_component_version_snapshot( self, snapshot_id: str, ) -> pai_studio_20220112_models.DeleteComponentVersionSnapshotResponse: + """ + @summary 删除组件版本快照 + + @return: DeleteComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_component_version_snapshot_with_options(snapshot_id, headers, runtime) @@ -1950,24 +2069,36 @@ async def delete_component_version_snapshot_async( self, snapshot_id: str, ) -> 
pai_studio_20220112_models.DeleteComponentVersionSnapshotResponse: + """ + @summary 删除组件版本快照 + + @return: DeleteComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_component_version_snapshot_with_options_async(snapshot_id, headers, runtime) - def delete_llmproject_with_options( + def delete_machine_group_with_options( self, - project_id: str, + machine_group_id: str, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeleteLLMProjectResponse: + ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: + """ + @summary delete machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteMachineGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) params = open_api_models.Params( - action='DeleteLLMProject', + action='DeleteMachineGroup', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', + pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', method='DELETE', auth_type='AK', style='ROA', @@ -1975,24 +2106,31 @@ def delete_llmproject_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.DeleteLLMProjectResponse(), + pai_studio_20220112_models.DeleteMachineGroupResponse(), self.call_api(params, req, runtime) ) - async def delete_llmproject_with_options_async( + async def delete_machine_group_with_options_async( self, - project_id: str, + machine_group_id: str, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeleteLLMProjectResponse: + ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: + """ + @summary delete machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteMachineGroupResponse + """ req = 
open_api_models.OpenApiRequest( headers=headers ) params = open_api_models.Params( - action='DeleteLLMProject', + action='DeleteMachineGroup', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', + pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', method='DELETE', auth_type='AK', style='ROA', @@ -2000,88 +2138,32 @@ async def delete_llmproject_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.DeleteLLMProjectResponse(), + pai_studio_20220112_models.DeleteMachineGroupResponse(), await self.call_api_async(params, req, runtime) ) - def delete_llmproject( + def delete_machine_group( self, - project_id: str, - ) -> pai_studio_20220112_models.DeleteLLMProjectResponse: + machine_group_id: str, + ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: + """ + @summary delete machine group + + @return: DeleteMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.delete_llmproject_with_options(project_id, headers, runtime) + return self.delete_machine_group_with_options(machine_group_id, headers, runtime) - async def delete_llmproject_async( + async def delete_machine_group_async( self, - project_id: str, - ) -> pai_studio_20220112_models.DeleteLLMProjectResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.delete_llmproject_with_options_async(project_id, headers, runtime) - - def delete_machine_group_with_options( - self, - machine_group_id: str, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: - req = open_api_models.OpenApiRequest( - headers=headers - ) - params = open_api_models.Params( - action='DeleteMachineGroup', - version='2022-01-12', - protocol='HTTPS', - 
pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', - method='DELETE', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.DeleteMachineGroupResponse(), - self.call_api(params, req, runtime) - ) - - async def delete_machine_group_with_options_async( - self, - machine_group_id: str, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: - req = open_api_models.OpenApiRequest( - headers=headers - ) - params = open_api_models.Params( - action='DeleteMachineGroup', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', - method='DELETE', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.DeleteMachineGroupResponse(), - await self.call_api_async(params, req, runtime) - ) - - def delete_machine_group( - self, - machine_group_id: str, - ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.delete_machine_group_with_options(machine_group_id, headers, runtime) - - async def delete_machine_group_async( - self, - machine_group_id: str, - ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: + machine_group_id: str, + ) -> pai_studio_20220112_models.DeleteMachineGroupResponse: + """ + @summary delete machine group + + @return: DeleteMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_machine_group_with_options_async(machine_group_id, headers, runtime) @@ -2092,6 +2174,13 @@ def delete_quota_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteQuotaResponse: + """ + @summary 删除Quota + + @param headers: map + 
@param runtime: runtime options for this request RuntimeOptions + @return: DeleteQuotaResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2117,6 +2206,13 @@ async def delete_quota_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteQuotaResponse: + """ + @summary 删除Quota + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteQuotaResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2140,6 +2236,11 @@ def delete_quota( self, quota_id: str, ) -> pai_studio_20220112_models.DeleteQuotaResponse: + """ + @summary 删除Quota + + @return: DeleteQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_quota_with_options(quota_id, headers, runtime) @@ -2148,6 +2249,11 @@ async def delete_quota_async( self, quota_id: str, ) -> pai_studio_20220112_models.DeleteQuotaResponse: + """ + @summary 删除Quota + + @return: DeleteQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_quota_with_options_async(quota_id, headers, runtime) @@ -2159,6 +2265,14 @@ def delete_quota_labels_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteQuotaLabelsResponse: + """ + @summary 删除Quota标签 + + @param request: DeleteQuotaLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteQuotaLabelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.keys): @@ -2190,6 +2304,14 @@ async def delete_quota_labels_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteQuotaLabelsResponse: + """ + @summary 删除Quota标签 + + @param request: DeleteQuotaLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + 
@return: DeleteQuotaLabelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.keys): @@ -2219,6 +2341,12 @@ def delete_quota_labels( quota_id: str, request: pai_studio_20220112_models.DeleteQuotaLabelsRequest, ) -> pai_studio_20220112_models.DeleteQuotaLabelsResponse: + """ + @summary 删除Quota标签 + + @param request: DeleteQuotaLabelsRequest + @return: DeleteQuotaLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_quota_labels_with_options(quota_id, request, headers, runtime) @@ -2228,6 +2356,12 @@ async def delete_quota_labels_async( quota_id: str, request: pai_studio_20220112_models.DeleteQuotaLabelsRequest, ) -> pai_studio_20220112_models.DeleteQuotaLabelsResponse: + """ + @summary 删除Quota标签 + + @param request: DeleteQuotaLabelsRequest + @return: DeleteQuotaLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_quota_labels_with_options_async(quota_id, request, headers, runtime) @@ -2238,6 +2372,13 @@ def delete_resource_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteResourceGroupResponse: + """ + @summary 删除资源组 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteResourceGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2263,6 +2404,13 @@ async def delete_resource_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteResourceGroupResponse: + """ + @summary 删除资源组 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteResourceGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2286,6 +2434,11 @@ def delete_resource_group( self, resource_group_id: str, ) -> pai_studio_20220112_models.DeleteResourceGroupResponse: + """ + @summary 删除资源组 + + 
@return: DeleteResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_resource_group_with_options(resource_group_id, headers, runtime) @@ -2294,6 +2447,11 @@ async def delete_resource_group_async( self, resource_group_id: str, ) -> pai_studio_20220112_models.DeleteResourceGroupResponse: + """ + @summary 删除资源组 + + @return: DeleteResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_resource_group_with_options_async(resource_group_id, headers, runtime) @@ -2305,6 +2463,13 @@ def delete_resource_group_machine_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteResourceGroupMachineGroupResponse: + """ + @summary delete machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteResourceGroupMachineGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2331,6 +2496,13 @@ async def delete_resource_group_machine_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteResourceGroupMachineGroupResponse: + """ + @summary delete machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteResourceGroupMachineGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2355,6 +2527,11 @@ def delete_resource_group_machine_group( machine_group_id: str, resource_group_id: str, ) -> pai_studio_20220112_models.DeleteResourceGroupMachineGroupResponse: + """ + @summary delete machine group + + @return: DeleteResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_resource_group_machine_group_with_options(machine_group_id, resource_group_id, headers, runtime) @@ -2364,6 +2541,11 @@ async def delete_resource_group_machine_group_async( 
machine_group_id: str, resource_group_id: str, ) -> pai_studio_20220112_models.DeleteResourceGroupMachineGroupResponse: + """ + @summary delete machine group + + @return: DeleteResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_resource_group_machine_group_with_options_async(machine_group_id, resource_group_id, headers, runtime) @@ -2374,6 +2556,13 @@ def delete_training_job_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteTrainingJobResponse: + """ + @summary 删除一个TrainingJob + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteTrainingJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2399,6 +2588,13 @@ async def delete_training_job_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteTrainingJobResponse: + """ + @summary 删除一个TrainingJob + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteTrainingJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2422,6 +2618,11 @@ def delete_training_job( self, training_job_id: str, ) -> pai_studio_20220112_models.DeleteTrainingJobResponse: + """ + @summary 删除一个TrainingJob + + @return: DeleteTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_training_job_with_options(training_job_id, headers, runtime) @@ -2430,6 +2631,11 @@ async def delete_training_job_async( self, training_job_id: str, ) -> pai_studio_20220112_models.DeleteTrainingJobResponse: + """ + @summary 删除一个TrainingJob + + @return: DeleteTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_training_job_with_options_async(training_job_id, headers, runtime) @@ -2441,6 +2647,14 @@ def delete_training_job_labels_with_options( 
headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteTrainingJobLabelsResponse: + """ + @summary 删除TrainingJob的Labels + + @param request: DeleteTrainingJobLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteTrainingJobLabelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.keys): @@ -2472,6 +2686,14 @@ async def delete_training_job_labels_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.DeleteTrainingJobLabelsResponse: + """ + @summary 删除TrainingJob的Labels + + @param request: DeleteTrainingJobLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: DeleteTrainingJobLabelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.keys): @@ -2501,6 +2723,12 @@ def delete_training_job_labels( training_job_id: str, request: pai_studio_20220112_models.DeleteTrainingJobLabelsRequest, ) -> pai_studio_20220112_models.DeleteTrainingJobLabelsResponse: + """ + @summary 删除TrainingJob的Labels + + @param request: DeleteTrainingJobLabelsRequest + @return: DeleteTrainingJobLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.delete_training_job_labels_with_options(training_job_id, request, headers, runtime) @@ -2510,111 +2738,28 @@ async def delete_training_job_labels_async( training_job_id: str, request: pai_studio_20220112_models.DeleteTrainingJobLabelsRequest, ) -> pai_studio_20220112_models.DeleteTrainingJobLabelsResponse: + """ + @summary 删除TrainingJob的Labels + + @param request: DeleteTrainingJobLabelsRequest + @return: DeleteTrainingJobLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.delete_training_job_labels_with_options_async(training_job_id, request, headers, runtime) - def 
deploy_llmsnapshot_with_options( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.DeployLLMSnapshotRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeployLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.description): - body['Description'] = request.description - if not UtilClient.is_unset(request.display_name): - body['DisplayName'] = request.display_name - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.workload): - body['Workload'] = request.workload - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='DeployLLMSnapshot', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}/deploy', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.DeployLLMSnapshotResponse(), - self.call_api(params, req, runtime) - ) - - async def deploy_llmsnapshot_with_options_async( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.DeployLLMSnapshotRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.DeployLLMSnapshotResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.description): - body['Description'] = request.description - if not UtilClient.is_unset(request.display_name): - body['DisplayName'] = request.display_name - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.workload): - body['Workload'] = request.workload - req = 
open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='DeployLLMSnapshot', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}/deploy', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.DeployLLMSnapshotResponse(), - await self.call_api_async(params, req, runtime) - ) - - def deploy_llmsnapshot( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.DeployLLMSnapshotRequest, - ) -> pai_studio_20220112_models.DeployLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.deploy_llmsnapshot_with_options(project_id, snapshot_id, request, headers, runtime) - - async def deploy_llmsnapshot_async( - self, - project_id: str, - snapshot_id: str, - request: pai_studio_20220112_models.DeployLLMSnapshotRequest, - ) -> pai_studio_20220112_models.DeployLLMSnapshotResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.deploy_llmsnapshot_with_options_async(project_id, snapshot_id, request, headers, runtime) - def get_ai4ddefault_bucket_with_options( self, headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAI4DDefaultBucketResponse: + """ + @summary 获取AI4D模型桶 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetAI4DDefaultBucketResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2639,6 +2784,13 @@ async def get_ai4ddefault_bucket_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAI4DDefaultBucketResponse: + """ + @summary 获取AI4D模型桶 + + @param headers: map + @param runtime: 
runtime options for this request RuntimeOptions + @return: GetAI4DDefaultBucketResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2659,11 +2811,21 @@ async def get_ai4ddefault_bucket_with_options_async( ) def get_ai4ddefault_bucket(self) -> pai_studio_20220112_models.GetAI4DDefaultBucketResponse: + """ + @summary 获取AI4D模型桶 + + @return: GetAI4DDefaultBucketResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_ai4ddefault_bucket_with_options(headers, runtime) async def get_ai4ddefault_bucket_async(self) -> pai_studio_20220112_models.GetAI4DDefaultBucketResponse: + """ + @summary 获取AI4D模型桶 + + @return: GetAI4DDefaultBucketResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_ai4ddefault_bucket_with_options_async(headers, runtime) @@ -2674,6 +2836,13 @@ def get_algorithm_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAlgorithmResponse: + """ + @summary 获取一个算法信息 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetAlgorithmResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2699,6 +2868,13 @@ async def get_algorithm_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAlgorithmResponse: + """ + @summary 获取一个算法信息 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetAlgorithmResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2722,6 +2898,11 @@ def get_algorithm( self, algorithm_id: str, ) -> pai_studio_20220112_models.GetAlgorithmResponse: + """ + @summary 获取一个算法信息 + + @return: GetAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_algorithm_with_options(algorithm_id, headers, runtime) @@ -2730,6 +2911,11 @@ async def get_algorithm_async( self, algorithm_id: str, ) -> 
pai_studio_20220112_models.GetAlgorithmResponse: + """ + @summary 获取一个算法信息 + + @return: GetAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_algorithm_with_options_async(algorithm_id, headers, runtime) @@ -2741,6 +2927,13 @@ def get_algorithm_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetAlgorithmVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2767,6 +2960,13 @@ async def get_algorithm_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetAlgorithmVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2791,6 +2991,11 @@ def get_algorithm_version( algorithm_id: str, algorithm_version: str, ) -> pai_studio_20220112_models.GetAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @return: GetAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_algorithm_version_with_options(algorithm_id, algorithm_version, headers, runtime) @@ -2800,6 +3005,11 @@ async def get_algorithm_version_async( algorithm_id: str, algorithm_version: str, ) -> pai_studio_20220112_models.GetAlgorithmVersionResponse: + """ + @summary 创建一个新的算法版本 + + @return: GetAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_algorithm_version_with_options_async(algorithm_id, algorithm_version, headers, runtime) @@ -2810,6 +3020,13 @@ def get_component_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> 
pai_studio_20220112_models.GetComponentResponse: + """ + @summary 查询组件信息 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetComponentResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2835,6 +3052,13 @@ async def get_component_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetComponentResponse: + """ + @summary 查询组件信息 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetComponentResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2858,6 +3082,11 @@ def get_component( self, component_id: str, ) -> pai_studio_20220112_models.GetComponentResponse: + """ + @summary 查询组件信息 + + @return: GetComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_component_with_options(component_id, headers, runtime) @@ -2866,6 +3095,11 @@ async def get_component_async( self, component_id: str, ) -> pai_studio_20220112_models.GetComponentResponse: + """ + @summary 查询组件信息 + + @return: GetComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_component_with_options_async(component_id, headers, runtime) @@ -2877,6 +3111,13 @@ def get_component_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetComponentVersionResponse: + """ + @summary 获取组件版本 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetComponentVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2903,6 +3144,13 @@ async def get_component_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetComponentVersionResponse: + """ + @summary 获取组件版本 + + @param headers: map + @param runtime: runtime options for this request 
RuntimeOptions + @return: GetComponentVersionResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2927,6 +3175,11 @@ def get_component_version( component_id: str, version: str, ) -> pai_studio_20220112_models.GetComponentVersionResponse: + """ + @summary 获取组件版本 + + @return: GetComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_component_version_with_options(component_id, version, headers, runtime) @@ -2936,6 +3189,11 @@ async def get_component_version_async( component_id: str, version: str, ) -> pai_studio_20220112_models.GetComponentVersionResponse: + """ + @summary 获取组件版本 + + @return: GetComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_component_version_with_options_async(component_id, version, headers, runtime) @@ -2946,6 +3204,13 @@ def get_component_version_snapshot_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetComponentVersionSnapshotResponse: + """ + @summary 获取组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2971,6 +3236,13 @@ async def get_component_version_snapshot_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetComponentVersionSnapshotResponse: + """ + @summary 获取组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -2994,6 +3266,11 @@ def get_component_version_snapshot( self, snapshot_id: str, ) -> pai_studio_20220112_models.GetComponentVersionSnapshotResponse: + """ + @summary 获取组件版本快照 + + @return: GetComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = 
{} return self.get_component_version_snapshot_with_options(snapshot_id, headers, runtime) @@ -3002,6 +3279,11 @@ async def get_component_version_snapshot_async( self, snapshot_id: str, ) -> pai_studio_20220112_models.GetComponentVersionSnapshotResponse: + """ + @summary 获取组件版本快照 + + @return: GetComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_component_version_snapshot_with_options_async(snapshot_id, headers, runtime) @@ -3012,6 +3294,13 @@ def get_instance_job_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetInstanceJobResponse: + """ + @summary 获取实例任务 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetInstanceJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -3037,6 +3326,13 @@ async def get_instance_job_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetInstanceJobResponse: + """ + @summary 获取实例任务 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetInstanceJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -3060,6 +3356,11 @@ def get_instance_job( self, instance_job_id: str, ) -> pai_studio_20220112_models.GetInstanceJobResponse: + """ + @summary 获取实例任务 + + @return: GetInstanceJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_instance_job_with_options(instance_job_id, headers, runtime) @@ -3068,6 +3369,11 @@ async def get_instance_job_async( self, instance_job_id: str, ) -> pai_studio_20220112_models.GetInstanceJobResponse: + """ + @summary 获取实例任务 + + @return: GetInstanceJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_instance_job_with_options_async(instance_job_id, headers, runtime) @@ -3079,6 +3385,14 @@ def 
get_job_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobViewMetricsResponse: + """ + @summary 按照job来统计性能指标 + + @param request: GetJobViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3122,6 +3436,14 @@ async def get_job_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobViewMetricsResponse: + """ + @summary 按照job来统计性能指标 + + @param request: GetJobViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3163,6 +3485,12 @@ def get_job_view_metrics( resource_group_id: str, request: pai_studio_20220112_models.GetJobViewMetricsRequest, ) -> pai_studio_20220112_models.GetJobViewMetricsResponse: + """ + @summary 按照job来统计性能指标 + + @param request: GetJobViewMetricsRequest + @return: GetJobViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_job_view_metrics_with_options(resource_group_id, request, headers, runtime) @@ -3172,6 +3500,12 @@ async def get_job_view_metrics_async( resource_group_id: str, request: pai_studio_20220112_models.GetJobViewMetricsRequest, ) -> pai_studio_20220112_models.GetJobViewMetricsResponse: + """ + @summary 按照job来统计性能指标 + + @param request: GetJobViewMetricsRequest + @return: GetJobViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_job_view_metrics_with_options_async(resource_group_id, request, headers, runtime) @@ -3183,6 +3517,14 @@ def get_jobs_statistics_by_quota_with_options( headers: Dict[str, str], runtime: 
util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobsStatisticsByQuotaResponse: + """ + @summary 获取当前资源配额的作业统计信息 + + @param request: GetJobsStatisticsByQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobsStatisticsByQuotaResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3218,6 +3560,14 @@ async def get_jobs_statistics_by_quota_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobsStatisticsByQuotaResponse: + """ + @summary 获取当前资源配额的作业统计信息 + + @param request: GetJobsStatisticsByQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobsStatisticsByQuotaResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3251,6 +3601,12 @@ def get_jobs_statistics_by_quota( quota_id: str, request: pai_studio_20220112_models.GetJobsStatisticsByQuotaRequest, ) -> pai_studio_20220112_models.GetJobsStatisticsByQuotaResponse: + """ + @summary 获取当前资源配额的作业统计信息 + + @param request: GetJobsStatisticsByQuotaRequest + @return: GetJobsStatisticsByQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_jobs_statistics_by_quota_with_options(quota_id, request, headers, runtime) @@ -3260,6 +3616,12 @@ async def get_jobs_statistics_by_quota_async( quota_id: str, request: pai_studio_20220112_models.GetJobsStatisticsByQuotaRequest, ) -> pai_studio_20220112_models.GetJobsStatisticsByQuotaResponse: + """ + @summary 获取当前资源配额的作业统计信息 + + @param request: GetJobsStatisticsByQuotaRequest + @return: GetJobsStatisticsByQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_jobs_statistics_by_quota_with_options_async(quota_id, request, headers, runtime) @@ -3271,6 +3633,14 @@ def 
get_jobs_statistics_by_resource_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobsStatisticsByResourceGroupResponse: + """ + @summary 按照resource group,查询Job的状态统计信息 + + @param request: GetJobsStatisticsByResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobsStatisticsByResourceGroupResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3306,6 +3676,14 @@ async def get_jobs_statistics_by_resource_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetJobsStatisticsByResourceGroupResponse: + """ + @summary 按照resource group,查询Job的状态统计信息 + + @param request: GetJobsStatisticsByResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetJobsStatisticsByResourceGroupResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3339,6 +3717,12 @@ def get_jobs_statistics_by_resource_group( resource_group_id: str, request: pai_studio_20220112_models.GetJobsStatisticsByResourceGroupRequest, ) -> pai_studio_20220112_models.GetJobsStatisticsByResourceGroupResponse: + """ + @summary 按照resource group,查询Job的状态统计信息 + + @param request: GetJobsStatisticsByResourceGroupRequest + @return: GetJobsStatisticsByResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_jobs_statistics_by_resource_group_with_options(resource_group_id, request, headers, runtime) @@ -3348,24 +3732,37 @@ async def get_jobs_statistics_by_resource_group_async( resource_group_id: str, request: pai_studio_20220112_models.GetJobsStatisticsByResourceGroupRequest, ) -> pai_studio_20220112_models.GetJobsStatisticsByResourceGroupResponse: + """ + @summary 按照resource group,查询Job的状态统计信息 + + @param request: 
GetJobsStatisticsByResourceGroupRequest + @return: GetJobsStatisticsByResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_jobs_statistics_by_resource_group_with_options_async(resource_group_id, request, headers, runtime) - def get_llmproject_with_options( + def get_machine_group_with_options( self, - project_id: str, + machine_group_id: str, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMProjectResponse: + ) -> pai_studio_20220112_models.GetMachineGroupResponse: + """ + @summary get machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetMachineGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) params = open_api_models.Params( - action='GetLLMProject', + action='GetMachineGroup', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', + pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', method='GET', auth_type='AK', style='ROA', @@ -3373,24 +3770,31 @@ def get_llmproject_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMProjectResponse(), + pai_studio_20220112_models.GetMachineGroupResponse(), self.call_api(params, req, runtime) ) - async def get_llmproject_with_options_async( + async def get_machine_group_with_options_async( self, - project_id: str, + machine_group_id: str, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMProjectResponse: + ) -> pai_studio_20220112_models.GetMachineGroupResponse: + """ + @summary get machine group + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetMachineGroupResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) params = open_api_models.Params( - 
action='GetLLMProject', + action='GetMachineGroup', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', + pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', method='GET', auth_type='AK', style='ROA', @@ -3398,40 +3802,79 @@ async def get_llmproject_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMProjectResponse(), + pai_studio_20220112_models.GetMachineGroupResponse(), await self.call_api_async(params, req, runtime) ) - def get_llmproject( + def get_machine_group( self, - project_id: str, - ) -> pai_studio_20220112_models.GetLLMProjectResponse: + machine_group_id: str, + ) -> pai_studio_20220112_models.GetMachineGroupResponse: + """ + @summary get machine group + + @return: GetMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.get_llmproject_with_options(project_id, headers, runtime) + return self.get_machine_group_with_options(machine_group_id, headers, runtime) - async def get_llmproject_async( + async def get_machine_group_async( self, - project_id: str, - ) -> pai_studio_20220112_models.GetLLMProjectResponse: + machine_group_id: str, + ) -> pai_studio_20220112_models.GetMachineGroupResponse: + """ + @summary get machine group + + @return: GetMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.get_llmproject_with_options_async(project_id, headers, runtime) + return await self.get_machine_group_with_options_async(machine_group_id, headers, runtime) - def get_llmservice_identity_role_with_options( + def get_metrics_with_options( self, - role_name: str, + request: pai_studio_20220112_models.GetMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse: + ) -> pai_studio_20220112_models.GetMetricsResponse: + """ + 
@summary 云监控 DescribeMetricList 代理 API + + @param request: GetMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetMetricsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.dimensions): + query['Dimensions'] = request.dimensions + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.express): + query['Express'] = request.express + if not UtilClient.is_unset(request.length): + query['Length'] = request.length + if not UtilClient.is_unset(request.metric_name): + query['MetricName'] = request.metric_name + if not UtilClient.is_unset(request.namespace): + query['Namespace'] = request.namespace + if not UtilClient.is_unset(request.next_token): + query['NextToken'] = request.next_token + if not UtilClient.is_unset(request.period): + query['Period'] = request.period + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='GetLLMServiceIdentityRole', + action='GetMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/serviceidentityroles/{OpenApiUtilClient.get_encode_param(role_name)}', + pathname=f'/api/v1/quotas/cms/metrics', method='GET', auth_type='AK', style='ROA', @@ -3439,24 +3882,53 @@ def get_llmservice_identity_role_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse(), + pai_studio_20220112_models.GetMetricsResponse(), self.call_api(params, req, runtime) ) - async def get_llmservice_identity_role_with_options_async( + async def get_metrics_with_options_async( self, - role_name: str, + request: pai_studio_20220112_models.GetMetricsRequest, headers: Dict[str, str], runtime: 
util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse: + ) -> pai_studio_20220112_models.GetMetricsResponse: + """ + @summary 云监控 DescribeMetricList 代理 API + + @param request: GetMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetMetricsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.dimensions): + query['Dimensions'] = request.dimensions + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.express): + query['Express'] = request.express + if not UtilClient.is_unset(request.length): + query['Length'] = request.length + if not UtilClient.is_unset(request.metric_name): + query['MetricName'] = request.metric_name + if not UtilClient.is_unset(request.namespace): + query['Namespace'] = request.namespace + if not UtilClient.is_unset(request.next_token): + query['NextToken'] = request.next_token + if not UtilClient.is_unset(request.period): + query['Period'] = request.period + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='GetLLMServiceIdentityRole', + action='GetMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/serviceidentityroles/{OpenApiUtilClient.get_encode_param(role_name)}', + pathname=f'/api/v1/quotas/cms/metrics', method='GET', auth_type='AK', style='ROA', @@ -3464,41 +3936,72 @@ async def get_llmservice_identity_role_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse(), + pai_studio_20220112_models.GetMetricsResponse(), await self.call_api_async(params, req, runtime) ) - def get_llmservice_identity_role( + def get_metrics( self, - 
role_name: str, - ) -> pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse: + request: pai_studio_20220112_models.GetMetricsRequest, + ) -> pai_studio_20220112_models.GetMetricsResponse: + """ + @summary 云监控 DescribeMetricList 代理 API + + @param request: GetMetricsRequest + @return: GetMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.get_llmservice_identity_role_with_options(role_name, headers, runtime) + return self.get_metrics_with_options(request, headers, runtime) - async def get_llmservice_identity_role_async( + async def get_metrics_async( self, - role_name: str, - ) -> pai_studio_20220112_models.GetLLMServiceIdentityRoleResponse: + request: pai_studio_20220112_models.GetMetricsRequest, + ) -> pai_studio_20220112_models.GetMetricsResponse: + """ + @summary 云监控 DescribeMetricList 代理 API + + @param request: GetMetricsRequest + @return: GetMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.get_llmservice_identity_role_with_options_async(role_name, headers, runtime) + return await self.get_metrics_with_options_async(request, headers, runtime) - def get_llmsnapshot_with_options( + def get_node_gpumetrics_with_options( self, - project_id: str, - snapshot_id: str, + node_id: str, + request: pai_studio_20220112_models.GetNodeGPUMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMSnapshotResponse: + ) -> pai_studio_20220112_models.GetNodeGPUMetricsResponse: + """ + @summary 查询节点的GPU指标 + + @param request: GetNodeGPUMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeGPUMetricsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.metric_type): + query['MetricType'] = request.metric_type + if not 
UtilClient.is_unset(request.quota_id): + query['QuotaId'] = request.quota_id + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='GetLLMSnapshot', + action='GetNodeGPUMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}', + pathname=f'/api/v1/nodes/{OpenApiUtilClient.get_encode_param(node_id)}/gpumetrics', method='GET', auth_type='AK', style='ROA', @@ -3506,25 +4009,44 @@ def get_llmsnapshot_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMSnapshotResponse(), + pai_studio_20220112_models.GetNodeGPUMetricsResponse(), self.call_api(params, req, runtime) ) - async def get_llmsnapshot_with_options_async( + async def get_node_gpumetrics_with_options_async( self, - project_id: str, - snapshot_id: str, + node_id: str, + request: pai_studio_20220112_models.GetNodeGPUMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetLLMSnapshotResponse: + ) -> pai_studio_20220112_models.GetNodeGPUMetricsResponse: + """ + @summary 查询节点的GPU指标 + + @param request: GetNodeGPUMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeGPUMetricsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.metric_type): + query['MetricType'] = request.metric_type + if not UtilClient.is_unset(request.quota_id): + query['QuotaId'] = request.quota_id + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = 
open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='GetLLMSnapshot', + action='GetNodeGPUMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots/{OpenApiUtilClient.get_encode_param(snapshot_id)}', + pathname=f'/api/v1/nodes/{OpenApiUtilClient.get_encode_param(node_id)}/gpumetrics', method='GET', auth_type='AK', style='ROA', @@ -3532,42 +4054,77 @@ async def get_llmsnapshot_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetLLMSnapshotResponse(), + pai_studio_20220112_models.GetNodeGPUMetricsResponse(), await self.call_api_async(params, req, runtime) ) - def get_llmsnapshot( + def get_node_gpumetrics( self, - project_id: str, - snapshot_id: str, - ) -> pai_studio_20220112_models.GetLLMSnapshotResponse: + node_id: str, + request: pai_studio_20220112_models.GetNodeGPUMetricsRequest, + ) -> pai_studio_20220112_models.GetNodeGPUMetricsResponse: + """ + @summary 查询节点的GPU指标 + + @param request: GetNodeGPUMetricsRequest + @return: GetNodeGPUMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.get_llmsnapshot_with_options(project_id, snapshot_id, headers, runtime) + return self.get_node_gpumetrics_with_options(node_id, request, headers, runtime) - async def get_llmsnapshot_async( + async def get_node_gpumetrics_async( self, - project_id: str, - snapshot_id: str, - ) -> pai_studio_20220112_models.GetLLMSnapshotResponse: + node_id: str, + request: pai_studio_20220112_models.GetNodeGPUMetricsRequest, + ) -> pai_studio_20220112_models.GetNodeGPUMetricsResponse: + """ + @summary 查询节点的GPU指标 + + @param request: GetNodeGPUMetricsRequest + @return: GetNodeGPUMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.get_llmsnapshot_with_options_async(project_id, snapshot_id, 
headers, runtime) + return await self.get_node_gpumetrics_with_options_async(node_id, request, headers, runtime) - def get_machine_group_with_options( + def get_node_metrics_with_options( self, - machine_group_id: str, + resource_group_id: str, + metric_type: str, + request: pai_studio_20220112_models.GetNodeMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetMachineGroupResponse: + ) -> pai_studio_20220112_models.GetNodeMetricsResponse: + """ + @summary get resource group node metrics + + @param request: GetNodeMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeMetricsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.gputype): + query['GPUType'] = request.gputype + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time + if not UtilClient.is_unset(request.time_step): + query['TimeStep'] = request.time_step + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='GetMachineGroup', + action='GetNodeMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', + pathname=f'/api/v1/resources/{OpenApiUtilClient.get_encode_param(resource_group_id)}/nodemetrics/{OpenApiUtilClient.get_encode_param(metric_type)}', method='GET', auth_type='AK', style='ROA', @@ -3575,99 +4132,26 @@ def get_machine_group_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.GetMachineGroupResponse(), + pai_studio_20220112_models.GetNodeMetricsResponse(), self.call_api(params, req, runtime) 
) - async def get_machine_group_with_options_async( + async def get_node_metrics_with_options_async( self, - machine_group_id: str, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetMachineGroupResponse: - req = open_api_models.OpenApiRequest( - headers=headers - ) - params = open_api_models.Params( - action='GetMachineGroup', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/resources/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', - method='GET', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.GetMachineGroupResponse(), - await self.call_api_async(params, req, runtime) - ) - - def get_machine_group( - self, - machine_group_id: str, - ) -> pai_studio_20220112_models.GetMachineGroupResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.get_machine_group_with_options(machine_group_id, headers, runtime) - - async def get_machine_group_async( - self, - machine_group_id: str, - ) -> pai_studio_20220112_models.GetMachineGroupResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.get_machine_group_with_options_async(machine_group_id, headers, runtime) - - def get_node_metrics_with_options( - self, - resource_group_id: str, - metric_type: str, - request: pai_studio_20220112_models.GetNodeMetricsRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.GetNodeMetricsResponse: - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.end_time): - query['EndTime'] = request.end_time - if not UtilClient.is_unset(request.gputype): - query['GPUType'] = request.gputype - if not UtilClient.is_unset(request.start_time): - query['StartTime'] = request.start_time - if not UtilClient.is_unset(request.time_step): - query['TimeStep'] = request.time_step - if not 
UtilClient.is_unset(request.verbose): - query['Verbose'] = request.verbose - req = open_api_models.OpenApiRequest( - headers=headers, - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='GetNodeMetrics', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/resources/{OpenApiUtilClient.get_encode_param(resource_group_id)}/nodemetrics/{OpenApiUtilClient.get_encode_param(metric_type)}', - method='GET', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.GetNodeMetricsResponse(), - self.call_api(params, req, runtime) - ) - - async def get_node_metrics_with_options_async( - self, - resource_group_id: str, - metric_type: str, - request: pai_studio_20220112_models.GetNodeMetricsRequest, + resource_group_id: str, + metric_type: str, + request: pai_studio_20220112_models.GetNodeMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetNodeMetricsResponse: + """ + @summary get resource group node metrics + + @param request: GetNodeMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -3706,6 +4190,12 @@ def get_node_metrics( metric_type: str, request: pai_studio_20220112_models.GetNodeMetricsRequest, ) -> pai_studio_20220112_models.GetNodeMetricsResponse: + """ + @summary get resource group node metrics + + @param request: GetNodeMetricsRequest + @return: GetNodeMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_node_metrics_with_options(resource_group_id, metric_type, request, headers, runtime) @@ -3716,6 +4206,12 @@ async def get_node_metrics_async( metric_type: str, request: pai_studio_20220112_models.GetNodeMetricsRequest, ) -> 
pai_studio_20220112_models.GetNodeMetricsResponse: + """ + @summary get resource group node metrics + + @param request: GetNodeMetricsRequest + @return: GetNodeMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_node_metrics_with_options_async(resource_group_id, metric_type, request, headers, runtime) @@ -3727,6 +4223,14 @@ def get_node_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetNodeViewMetricsResponse: + """ + @summary 获取节点视角的metrics + + @param request: GetNodeViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.node_id): @@ -3766,6 +4270,14 @@ async def get_node_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetNodeViewMetricsResponse: + """ + @summary 获取节点视角的metrics + + @param request: GetNodeViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetNodeViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.node_id): @@ -3803,6 +4315,12 @@ def get_node_view_metrics( resource_group_id: str, request: pai_studio_20220112_models.GetNodeViewMetricsRequest, ) -> pai_studio_20220112_models.GetNodeViewMetricsResponse: + """ + @summary 获取节点视角的metrics + + @param request: GetNodeViewMetricsRequest + @return: GetNodeViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_node_view_metrics_with_options(resource_group_id, request, headers, runtime) @@ -3812,6 +4330,12 @@ async def get_node_view_metrics_async( resource_group_id: str, request: pai_studio_20220112_models.GetNodeViewMetricsRequest, ) -> 
pai_studio_20220112_models.GetNodeViewMetricsResponse: + """ + @summary 获取节点视角的metrics + + @param request: GetNodeViewMetricsRequest + @return: GetNodeViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_node_view_metrics_with_options_async(resource_group_id, request, headers, runtime) @@ -3822,6 +4346,13 @@ def get_operation_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetOperationResponse: + """ + @summary 获取资源变更详情 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetOperationResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -3847,6 +4378,13 @@ async def get_operation_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetOperationResponse: + """ + @summary 获取资源变更详情 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetOperationResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -3870,6 +4408,11 @@ def get_operation( self, operation_id: str, ) -> pai_studio_20220112_models.GetOperationResponse: + """ + @summary 获取资源变更详情 + + @return: GetOperationResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_operation_with_options(operation_id, headers, runtime) @@ -3878,6 +4421,11 @@ async def get_operation_async( self, operation_id: str, ) -> pai_studio_20220112_models.GetOperationResponse: + """ + @summary 获取资源变更详情 + + @return: GetOperationResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_operation_with_options_async(operation_id, headers, runtime) @@ -3888,6 +4436,14 @@ def get_queue_infos_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQueueInfosResponse: + """ + @summary 您可以通过GetQueueInfos得到一组队列的排队信息。 + + @param request: 
GetQueueInfosRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQueueInfosResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -3932,6 +4488,14 @@ async def get_queue_infos_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQueueInfosResponse: + """ + @summary 您可以通过GetQueueInfos得到一组队列的排队信息。 + + @param request: GetQueueInfosRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQueueInfosResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -3974,6 +4538,12 @@ def get_queue_infos( self, request: pai_studio_20220112_models.GetQueueInfosRequest, ) -> pai_studio_20220112_models.GetQueueInfosResponse: + """ + @summary 您可以通过GetQueueInfos得到一组队列的排队信息。 + + @param request: GetQueueInfosRequest + @return: GetQueueInfosResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_queue_infos_with_options(request, headers, runtime) @@ -3982,6 +4552,12 @@ async def get_queue_infos_async( self, request: pai_studio_20220112_models.GetQueueInfosRequest, ) -> pai_studio_20220112_models.GetQueueInfosResponse: + """ + @summary 您可以通过GetQueueInfos得到一组队列的排队信息。 + + @param request: GetQueueInfosRequest + @return: GetQueueInfosResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_queue_infos_with_options_async(request, headers, runtime) @@ -3989,11 +4565,25 @@ async def get_queue_infos_async( def get_quota_with_options( self, quota_id: str, + request: pai_studio_20220112_models.GetQuotaRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaResponse: + """ + @summary 获取Quota + + @param request: GetQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + 
@return: GetQuotaResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='GetQuota', @@ -4014,11 +4604,25 @@ def get_quota_with_options( async def get_quota_with_options_async( self, quota_id: str, + request: pai_studio_20220112_models.GetQuotaRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaResponse: + """ + @summary 获取Quota + + @param request: GetQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='GetQuota', @@ -4039,18 +4643,32 @@ async def get_quota_with_options_async( def get_quota( self, quota_id: str, + request: pai_studio_20220112_models.GetQuotaRequest, ) -> pai_studio_20220112_models.GetQuotaResponse: + """ + @summary 获取Quota + + @param request: GetQuotaRequest + @return: GetQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.get_quota_with_options(quota_id, headers, runtime) + return self.get_quota_with_options(quota_id, request, headers, runtime) async def get_quota_async( self, quota_id: str, + request: pai_studio_20220112_models.GetQuotaRequest, ) -> pai_studio_20220112_models.GetQuotaResponse: + """ + @summary 获取Quota + + @param request: GetQuotaRequest + @return: GetQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.get_quota_with_options_async(quota_id, headers, runtime) + return await 
self.get_quota_with_options_async(quota_id, request, headers, runtime) def get_quota_job_view_metrics_with_options( self, @@ -4059,6 +4677,14 @@ def get_quota_job_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaJobViewMetricsResponse: + """ + @summary 获取资源配额内运行的DLC、DSW任务的性能指标 + + @param request: GetQuotaJobViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaJobViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4104,6 +4730,14 @@ async def get_quota_job_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaJobViewMetricsResponse: + """ + @summary 获取资源配额内运行的DLC、DSW任务的性能指标 + + @param request: GetQuotaJobViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaJobViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4147,6 +4781,12 @@ def get_quota_job_view_metrics( quota_id: str, request: pai_studio_20220112_models.GetQuotaJobViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaJobViewMetricsResponse: + """ + @summary 获取资源配额内运行的DLC、DSW任务的性能指标 + + @param request: GetQuotaJobViewMetricsRequest + @return: GetQuotaJobViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_job_view_metrics_with_options(quota_id, request, headers, runtime) @@ -4156,6 +4796,12 @@ async def get_quota_job_view_metrics_async( quota_id: str, request: pai_studio_20220112_models.GetQuotaJobViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaJobViewMetricsResponse: + """ + @summary 获取资源配额内运行的DLC、DSW任务的性能指标 + + @param request: GetQuotaJobViewMetricsRequest + @return: GetQuotaJobViewMetricsResponse + """ 
runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_job_view_metrics_with_options_async(quota_id, request, headers, runtime) @@ -4168,6 +4814,14 @@ def get_quota_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaMetricsResponse: + """ + @summary 资源配额组维度指标 + + @param request: GetQuotaMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4206,6 +4860,14 @@ async def get_quota_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaMetricsResponse: + """ + @summary 资源配额组维度指标 + + @param request: GetQuotaMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4242,6 +4904,12 @@ def get_quota_metrics( metric_type: str, request: pai_studio_20220112_models.GetQuotaMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaMetricsResponse: + """ + @summary 资源配额组维度指标 + + @param request: GetQuotaMetricsRequest + @return: GetQuotaMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_metrics_with_options(quota_id, metric_type, request, headers, runtime) @@ -4252,6 +4920,12 @@ async def get_quota_metrics_async( metric_type: str, request: pai_studio_20220112_models.GetQuotaMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaMetricsResponse: + """ + @summary 资源配额组维度指标 + + @param request: GetQuotaMetricsRequest + @return: GetQuotaMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_metrics_with_options_async(quota_id, metric_type, request, headers, 
runtime) @@ -4264,6 +4938,14 @@ def get_quota_node_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaNodeMetricsResponse: + """ + @summary 资源配额内节点指标 + + @param request: GetQuotaNodeMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaNodeMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4304,6 +4986,14 @@ async def get_quota_node_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaNodeMetricsResponse: + """ + @summary 资源配额内节点指标 + + @param request: GetQuotaNodeMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaNodeMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4342,6 +5032,12 @@ def get_quota_node_metrics( metric_type: str, request: pai_studio_20220112_models.GetQuotaNodeMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaNodeMetricsResponse: + """ + @summary 资源配额内节点指标 + + @param request: GetQuotaNodeMetricsRequest + @return: GetQuotaNodeMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_node_metrics_with_options(quota_id, metric_type, request, headers, runtime) @@ -4352,6 +5048,12 @@ async def get_quota_node_metrics_async( metric_type: str, request: pai_studio_20220112_models.GetQuotaNodeMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaNodeMetricsResponse: + """ + @summary 资源配额内节点指标 + + @param request: GetQuotaNodeMetricsRequest + @return: GetQuotaNodeMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_node_metrics_with_options_async(quota_id, metric_type, request, headers, runtime) @@ -4363,6 +5065,14 @@ def 
get_quota_node_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaNodeViewMetricsResponse: + """ + @summary 获取资源配额内节点实时的性能指标 + + @param request: GetQuotaNodeViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaNodeViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.node_id): @@ -4414,6 +5124,14 @@ async def get_quota_node_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaNodeViewMetricsResponse: + """ + @summary 获取资源配额内节点实时的性能指标 + + @param request: GetQuotaNodeViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaNodeViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.node_id): @@ -4463,6 +5181,12 @@ def get_quota_node_view_metrics( quota_id: str, request: pai_studio_20220112_models.GetQuotaNodeViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaNodeViewMetricsResponse: + """ + @summary 获取资源配额内节点实时的性能指标 + + @param request: GetQuotaNodeViewMetricsRequest + @return: GetQuotaNodeViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_node_view_metrics_with_options(quota_id, request, headers, runtime) @@ -4472,6 +5196,12 @@ async def get_quota_node_view_metrics_async( quota_id: str, request: pai_studio_20220112_models.GetQuotaNodeViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaNodeViewMetricsResponse: + """ + @summary 获取资源配额内节点实时的性能指标 + + @param request: GetQuotaNodeViewMetricsRequest + @return: GetQuotaNodeViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_node_view_metrics_with_options_async(quota_id, request, headers, runtime) @@ -4483,6 
+5213,14 @@ def get_quota_queue_info_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaQueueInfoResponse: + """ + @summary 您可以通过 GetQuotaQueueInfo得到使用当前Quota的实例的排队信息。 + + @param request: GetQuotaQueueInfoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaQueueInfoResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.before_workload_id): @@ -4534,6 +5272,14 @@ async def get_quota_queue_info_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaQueueInfoResponse: + """ + @summary 您可以通过 GetQuotaQueueInfo得到使用当前Quota的实例的排队信息。 + + @param request: GetQuotaQueueInfoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaQueueInfoResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.before_workload_id): @@ -4583,6 +5329,12 @@ def get_quota_queue_info( quota_id: str, request: pai_studio_20220112_models.GetQuotaQueueInfoRequest, ) -> pai_studio_20220112_models.GetQuotaQueueInfoResponse: + """ + @summary 您可以通过 GetQuotaQueueInfo得到使用当前Quota的实例的排队信息。 + + @param request: GetQuotaQueueInfoRequest + @return: GetQuotaQueueInfoResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_queue_info_with_options(quota_id, request, headers, runtime) @@ -4592,6 +5344,12 @@ async def get_quota_queue_info_async( quota_id: str, request: pai_studio_20220112_models.GetQuotaQueueInfoRequest, ) -> pai_studio_20220112_models.GetQuotaQueueInfoResponse: + """ + @summary 您可以通过 GetQuotaQueueInfo得到使用当前Quota的实例的排队信息。 + + @param request: GetQuotaQueueInfoRequest + @return: GetQuotaQueueInfoResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_queue_info_with_options_async(quota_id, request, headers, 
runtime) @@ -4603,6 +5361,14 @@ def get_quota_range_user_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaRangeUserViewMetricsResponse: + """ + @summary 获取资源配额用户视图的历史资源使用情况 + + @param request: GetQuotaRangeUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaRangeUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4648,6 +5414,14 @@ async def get_quota_range_user_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaRangeUserViewMetricsResponse: + """ + @summary 获取资源配额用户视图的历史资源使用情况 + + @param request: GetQuotaRangeUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaRangeUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4691,6 +5465,12 @@ def get_quota_range_user_view_metrics( quota_id: str, request: pai_studio_20220112_models.GetQuotaRangeUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaRangeUserViewMetricsResponse: + """ + @summary 获取资源配额用户视图的历史资源使用情况 + + @param request: GetQuotaRangeUserViewMetricsRequest + @return: GetQuotaRangeUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_range_user_view_metrics_with_options(quota_id, request, headers, runtime) @@ -4700,10 +5480,132 @@ async def get_quota_range_user_view_metrics_async( quota_id: str, request: pai_studio_20220112_models.GetQuotaRangeUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaRangeUserViewMetricsResponse: + """ + @summary 获取资源配额用户视图的历史资源使用情况 + + @param request: GetQuotaRangeUserViewMetricsRequest + @return: GetQuotaRangeUserViewMetricsResponse + """ runtime = 
util_models.RuntimeOptions() headers = {} return await self.get_quota_range_user_view_metrics_with_options_async(quota_id, request, headers, runtime) + def get_quota_topo_with_options( + self, + quota_id: str, + request: pai_studio_20220112_models.GetQuotaTopoRequest, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.GetQuotaTopoResponse: + """ + @summary 获取Quota拓扑信息 + + @param request: GetQuotaTopoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaTopoResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.depth): + query['Depth'] = request.depth + if not UtilClient.is_unset(request.show_own_workloads): + query['ShowOwnWorkloads'] = request.show_own_workloads + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose + req = open_api_models.OpenApiRequest( + headers=headers, + query=OpenApiUtilClient.query(query) + ) + params = open_api_models.Params( + action='GetQuotaTopo', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/quotas/%5BQuotaId%5D/topo', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.GetQuotaTopoResponse(), + self.call_api(params, req, runtime) + ) + + async def get_quota_topo_with_options_async( + self, + quota_id: str, + request: pai_studio_20220112_models.GetQuotaTopoRequest, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.GetQuotaTopoResponse: + """ + @summary 获取Quota拓扑信息 + + @param request: GetQuotaTopoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaTopoResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.depth): + query['Depth'] = request.depth + if not 
UtilClient.is_unset(request.show_own_workloads): + query['ShowOwnWorkloads'] = request.show_own_workloads + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose + req = open_api_models.OpenApiRequest( + headers=headers, + query=OpenApiUtilClient.query(query) + ) + params = open_api_models.Params( + action='GetQuotaTopo', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/quotas/%5BQuotaId%5D/topo', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.GetQuotaTopoResponse(), + await self.call_api_async(params, req, runtime) + ) + + def get_quota_topo( + self, + quota_id: str, + request: pai_studio_20220112_models.GetQuotaTopoRequest, + ) -> pai_studio_20220112_models.GetQuotaTopoResponse: + """ + @summary 获取Quota拓扑信息 + + @param request: GetQuotaTopoRequest + @return: GetQuotaTopoResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return self.get_quota_topo_with_options(quota_id, request, headers, runtime) + + async def get_quota_topo_async( + self, + quota_id: str, + request: pai_studio_20220112_models.GetQuotaTopoRequest, + ) -> pai_studio_20220112_models.GetQuotaTopoResponse: + """ + @summary 获取Quota拓扑信息 + + @param request: GetQuotaTopoRequest + @return: GetQuotaTopoResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return await self.get_quota_topo_with_options_async(quota_id, request, headers, runtime) + def get_quota_user_view_metrics_with_options( self, quota_id: str, @@ -4711,6 +5613,14 @@ def get_quota_user_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaUserViewMetricsResponse: + """ + @summary 获取用户视图的资源使用情况 + + @param request: GetQuotaUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaUserViewMetricsResponse + """ 
UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -4754,6 +5664,14 @@ async def get_quota_user_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetQuotaUserViewMetricsResponse: + """ + @summary 获取用户视图的资源使用情况 + + @param request: GetQuotaUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetQuotaUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -4795,6 +5713,12 @@ def get_quota_user_view_metrics( quota_id: str, request: pai_studio_20220112_models.GetQuotaUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaUserViewMetricsResponse: + """ + @summary 获取用户视图的资源使用情况 + + @param request: GetQuotaUserViewMetricsRequest + @return: GetQuotaUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_quota_user_view_metrics_with_options(quota_id, request, headers, runtime) @@ -4804,6 +5728,12 @@ async def get_quota_user_view_metrics_async( quota_id: str, request: pai_studio_20220112_models.GetQuotaUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetQuotaUserViewMetricsResponse: + """ + @summary 获取用户视图的资源使用情况 + + @param request: GetQuotaUserViewMetricsRequest + @return: GetQuotaUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_quota_user_view_metrics_with_options_async(quota_id, request, headers, runtime) @@ -4815,6 +5745,14 @@ def get_range_user_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetRangeUserViewMetricsResponse: + """ + @summary 获取按照user统计的性能指标的历史数据 + + @param request: GetRangeUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetRangeUserViewMetricsResponse 
+ """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4860,6 +5798,14 @@ async def get_range_user_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetRangeUserViewMetricsResponse: + """ + @summary 获取按照user统计的性能指标的历史数据 + + @param request: GetRangeUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetRangeUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -4903,6 +5849,12 @@ def get_range_user_view_metrics( resource_group_id: str, request: pai_studio_20220112_models.GetRangeUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetRangeUserViewMetricsResponse: + """ + @summary 获取按照user统计的性能指标的历史数据 + + @param request: GetRangeUserViewMetricsRequest + @return: GetRangeUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_range_user_view_metrics_with_options(resource_group_id, request, headers, runtime) @@ -4912,6 +5864,12 @@ async def get_range_user_view_metrics_async( resource_group_id: str, request: pai_studio_20220112_models.GetRangeUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetRangeUserViewMetricsResponse: + """ + @summary 获取按照user统计的性能指标的历史数据 + + @param request: GetRangeUserViewMetricsRequest + @return: GetRangeUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_range_user_view_metrics_with_options_async(resource_group_id, request, headers, runtime) @@ -4923,6 +5881,14 @@ def get_resource_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupResponse: + """ + @summary get resource group by group id + + @param tmp_req: GetResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request 
RuntimeOptions + @return: GetResourceGroupResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.GetResourceGroupShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -4960,6 +5926,14 @@ async def get_resource_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupResponse: + """ + @summary get resource group by group id + + @param tmp_req: GetResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.GetResourceGroupShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -4995,6 +5969,12 @@ def get_resource_group( resource_group_id: str, request: pai_studio_20220112_models.GetResourceGroupRequest, ) -> pai_studio_20220112_models.GetResourceGroupResponse: + """ + @summary get resource group by group id + + @param request: GetResourceGroupRequest + @return: GetResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_resource_group_with_options(resource_group_id, request, headers, runtime) @@ -5004,6 +5984,12 @@ async def get_resource_group_async( resource_group_id: str, request: pai_studio_20220112_models.GetResourceGroupRequest, ) -> pai_studio_20220112_models.GetResourceGroupResponse: + """ + @summary get resource group by group id + + @param request: GetResourceGroupRequest + @return: GetResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_resource_group_with_options_async(resource_group_id, request, headers, runtime) @@ -5016,6 +6002,14 @@ def get_resource_group_machine_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupMachineGroupResponse: + """ + @summary get machine group + + @param tmp_req: 
GetResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupMachineGroupResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.GetResourceGroupMachineGroupShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -5052,6 +6046,14 @@ async def get_resource_group_machine_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupMachineGroupResponse: + """ + @summary get machine group + + @param tmp_req: GetResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupMachineGroupResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.GetResourceGroupMachineGroupShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -5086,6 +6088,12 @@ def get_resource_group_machine_group( resource_group_id: str, request: pai_studio_20220112_models.GetResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.GetResourceGroupMachineGroupResponse: + """ + @summary get machine group + + @param request: GetResourceGroupMachineGroupRequest + @return: GetResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_resource_group_machine_group_with_options(machine_group_id, resource_group_id, request, headers, runtime) @@ -5096,6 +6104,12 @@ async def get_resource_group_machine_group_async( resource_group_id: str, request: pai_studio_20220112_models.GetResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.GetResourceGroupMachineGroupResponse: + """ + @summary get machine group + + @param request: GetResourceGroupMachineGroupRequest + @return: GetResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await 
self.get_resource_group_machine_group_with_options_async(machine_group_id, resource_group_id, request, headers, runtime) @@ -5108,6 +6122,14 @@ def get_resource_group_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupMetricsResponse: + """ + @summary 获取资源组卡型的使用率 + + @param request: GetResourceGroupMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -5146,6 +6168,14 @@ async def get_resource_group_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupMetricsResponse: + """ + @summary 获取资源组卡型的使用率 + + @param request: GetResourceGroupMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -5182,6 +6212,12 @@ def get_resource_group_metrics( metric_type: str, request: pai_studio_20220112_models.GetResourceGroupMetricsRequest, ) -> pai_studio_20220112_models.GetResourceGroupMetricsResponse: + """ + @summary 获取资源组卡型的使用率 + + @param request: GetResourceGroupMetricsRequest + @return: GetResourceGroupMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_resource_group_metrics_with_options(resource_group_id, metric_type, request, headers, runtime) @@ -5192,6 +6228,12 @@ async def get_resource_group_metrics_async( metric_type: str, request: pai_studio_20220112_models.GetResourceGroupMetricsRequest, ) -> pai_studio_20220112_models.GetResourceGroupMetricsResponse: + """ + @summary 获取资源组卡型的使用率 + + @param request: GetResourceGroupMetricsRequest + @return: GetResourceGroupMetricsResponse + """ runtime = 
util_models.RuntimeOptions() headers = {} return await self.get_resource_group_metrics_with_options_async(resource_group_id, metric_type, request, headers, runtime) @@ -5202,6 +6244,14 @@ def get_resource_group_request_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupRequestResponse: + """ + @summary get resource group requested resource by resource group id + + @param request: GetResourceGroupRequestRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupRequestResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.pod_status): @@ -5234,6 +6284,14 @@ async def get_resource_group_request_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupRequestResponse: + """ + @summary get resource group requested resource by resource group id + + @param request: GetResourceGroupRequestRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupRequestResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.pod_status): @@ -5264,6 +6322,12 @@ def get_resource_group_request( self, request: pai_studio_20220112_models.GetResourceGroupRequestRequest, ) -> pai_studio_20220112_models.GetResourceGroupRequestResponse: + """ + @summary get resource group requested resource by resource group id + + @param request: GetResourceGroupRequestRequest + @return: GetResourceGroupRequestResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_resource_group_request_with_options(request, headers, runtime) @@ -5272,6 +6336,12 @@ async def get_resource_group_request_async( self, request: pai_studio_20220112_models.GetResourceGroupRequestRequest, ) -> pai_studio_20220112_models.GetResourceGroupRequestResponse: + """ + 
@summary get resource group requested resource by resource group id + + @param request: GetResourceGroupRequestRequest + @return: GetResourceGroupRequestResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_resource_group_request_with_options_async(request, headers, runtime) @@ -5282,6 +6352,14 @@ def get_resource_group_total_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupTotalResponse: + """ + @summary get resource group total resource by group id + + @param request: GetResourceGroupTotalRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupTotalResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.resource_group_id): @@ -5312,6 +6390,14 @@ async def get_resource_group_total_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetResourceGroupTotalResponse: + """ + @summary get resource group total resource by group id + + @param request: GetResourceGroupTotalRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetResourceGroupTotalResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.resource_group_id): @@ -5340,6 +6426,12 @@ def get_resource_group_total( self, request: pai_studio_20220112_models.GetResourceGroupTotalRequest, ) -> pai_studio_20220112_models.GetResourceGroupTotalResponse: + """ + @summary get resource group total resource by group id + + @param request: GetResourceGroupTotalRequest + @return: GetResourceGroupTotalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_resource_group_total_with_options(request, headers, runtime) @@ -5348,6 +6440,12 @@ async def get_resource_group_total_async( self, request: 
pai_studio_20220112_models.GetResourceGroupTotalRequest, ) -> pai_studio_20220112_models.GetResourceGroupTotalResponse: + """ + @summary get resource group total resource by group id + + @param request: GetResourceGroupTotalRequest + @return: GetResourceGroupTotalResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_resource_group_total_with_options_async(request, headers, runtime) @@ -5358,6 +6456,13 @@ def get_service_identity_role_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetServiceIdentityRoleResponse: + """ + @summary 获取服务认证角色 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetServiceIdentityRoleResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -5383,6 +6488,13 @@ async def get_service_identity_role_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetServiceIdentityRoleResponse: + """ + @summary 获取服务认证角色 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetServiceIdentityRoleResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -5406,6 +6518,11 @@ def get_service_identity_role( self, role_name: str, ) -> pai_studio_20220112_models.GetServiceIdentityRoleResponse: + """ + @summary 获取服务认证角色 + + @return: GetServiceIdentityRoleResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_service_identity_role_with_options(role_name, headers, runtime) @@ -5414,6 +6531,11 @@ async def get_service_identity_role_async( self, role_name: str, ) -> pai_studio_20220112_models.GetServiceIdentityRoleResponse: + """ + @summary 获取服务认证角色 + + @return: GetServiceIdentityRoleResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_service_identity_role_with_options_async(role_name, headers, runtime) @@ -5425,6 +6547,14 
@@ def get_spot_price_history_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetSpotPriceHistoryResponse: + """ + @summary 获取抢占式实例历史价格 + + @param request: GetSpotPriceHistoryRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetSpotPriceHistoryResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -5466,6 +6596,14 @@ async def get_spot_price_history_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetSpotPriceHistoryResponse: + """ + @summary 获取抢占式实例历史价格 + + @param request: GetSpotPriceHistoryRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetSpotPriceHistoryResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -5505,6 +6643,12 @@ def get_spot_price_history( instance_type: str, request: pai_studio_20220112_models.GetSpotPriceHistoryRequest, ) -> pai_studio_20220112_models.GetSpotPriceHistoryResponse: + """ + @summary 获取抢占式实例历史价格 + + @param request: GetSpotPriceHistoryRequest + @return: GetSpotPriceHistoryResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_spot_price_history_with_options(instance_type, request, headers, runtime) @@ -5514,6 +6658,12 @@ async def get_spot_price_history_async( instance_type: str, request: pai_studio_20220112_models.GetSpotPriceHistoryRequest, ) -> pai_studio_20220112_models.GetSpotPriceHistoryResponse: + """ + @summary 获取抢占式实例历史价格 + + @param request: GetSpotPriceHistoryRequest + @return: GetSpotPriceHistoryResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_spot_price_history_with_options_async(instance_type, request, headers, runtime) @@ -5524,6 +6674,13 @@ def get_spot_stock_preview_with_options( headers: Dict[str, str], 
runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetSpotStockPreviewResponse: + """ + @summary 获取抢占式实例的库存概览 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetSpotStockPreviewResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -5549,6 +6706,13 @@ async def get_spot_stock_preview_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetSpotStockPreviewResponse: + """ + @summary 获取抢占式实例的库存概览 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetSpotStockPreviewResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -5572,6 +6736,11 @@ def get_spot_stock_preview( self, instance_type: str, ) -> pai_studio_20220112_models.GetSpotStockPreviewResponse: + """ + @summary 获取抢占式实例的库存概览 + + @return: GetSpotStockPreviewResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_spot_stock_preview_with_options(instance_type, headers, runtime) @@ -5580,6 +6749,11 @@ async def get_spot_stock_preview_async( self, instance_type: str, ) -> pai_studio_20220112_models.GetSpotStockPreviewResponse: + """ + @summary 获取抢占式实例的库存概览 + + @return: GetSpotStockPreviewResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_spot_stock_preview_with_options_async(instance_type, headers, runtime) @@ -5590,6 +6764,14 @@ def get_token_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTokenResponse: + """ + @summary 调用GetToken获取临时鉴权信息 + + @param request: GetTokenRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTokenResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.expire_time): @@ -5622,6 +6804,14 @@ async def get_token_with_options_async( headers: Dict[str, str], 
runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTokenResponse: + """ + @summary 调用GetToken获取临时鉴权信息 + + @param request: GetTokenRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTokenResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.expire_time): @@ -5652,6 +6842,12 @@ def get_token( self, request: pai_studio_20220112_models.GetTokenRequest, ) -> pai_studio_20220112_models.GetTokenResponse: + """ + @summary 调用GetToken获取临时鉴权信息 + + @param request: GetTokenRequest + @return: GetTokenResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_token_with_options(request, headers, runtime) @@ -5660,6 +6856,12 @@ async def get_token_async( self, request: pai_studio_20220112_models.GetTokenRequest, ) -> pai_studio_20220112_models.GetTokenResponse: + """ + @summary 调用GetToken获取临时鉴权信息 + + @param request: GetTokenRequest + @return: GetTokenResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_token_with_options_async(request, headers, runtime) @@ -5671,6 +6873,14 @@ def get_training_job_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobResponse: + """ + @summary 获取TrainingJob的详情 + + @param request: GetTrainingJobRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTrainingJobResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.token): @@ -5702,6 +6912,14 @@ async def get_training_job_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobResponse: + """ + @summary 获取TrainingJob的详情 + + @param request: GetTrainingJobRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTrainingJobResponse + """ 
UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.token): @@ -5731,6 +6949,12 @@ def get_training_job( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobRequest, ) -> pai_studio_20220112_models.GetTrainingJobResponse: + """ + @summary 获取TrainingJob的详情 + + @param request: GetTrainingJobRequest + @return: GetTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_training_job_with_options(training_job_id, request, headers, runtime) @@ -5740,6 +6964,12 @@ async def get_training_job_async( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobRequest, ) -> pai_studio_20220112_models.GetTrainingJobResponse: + """ + @summary 获取TrainingJob的详情 + + @param request: GetTrainingJobRequest + @return: GetTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_training_job_with_options_async(training_job_id, request, headers, runtime) @@ -5751,6 +6981,14 @@ def get_training_job_error_info_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobErrorInfoResponse: + """ + @summary 获取Training Job的算法错误信息 + + @param request: GetTrainingJobErrorInfoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTrainingJobErrorInfoResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.token): @@ -5782,6 +7020,14 @@ async def get_training_job_error_info_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobErrorInfoResponse: + """ + @summary 获取Training Job的算法错误信息 + + @param request: GetTrainingJobErrorInfoRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTrainingJobErrorInfoResponse + """ UtilClient.validate_model(request) query = {} if not 
UtilClient.is_unset(request.token): @@ -5811,6 +7057,12 @@ def get_training_job_error_info( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobErrorInfoRequest, ) -> pai_studio_20220112_models.GetTrainingJobErrorInfoResponse: + """ + @summary 获取Training Job的算法错误信息 + + @param request: GetTrainingJobErrorInfoRequest + @return: GetTrainingJobErrorInfoResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_training_job_error_info_with_options(training_job_id, request, headers, runtime) @@ -5820,6 +7072,12 @@ async def get_training_job_error_info_async( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobErrorInfoRequest, ) -> pai_studio_20220112_models.GetTrainingJobErrorInfoResponse: + """ + @summary 获取Training Job的算法错误信息 + + @param request: GetTrainingJobErrorInfoRequest + @return: GetTrainingJobErrorInfoResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_training_job_error_info_with_options_async(training_job_id, request, headers, runtime) @@ -5831,6 +7089,14 @@ def get_training_job_latest_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobLatestMetricsResponse: + """ + @summary 获取TrainingJob最近的Metrics + + @param request: GetTrainingJobLatestMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetTrainingJobLatestMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.names): @@ -5864,6 +7130,14 @@ async def get_training_job_latest_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetTrainingJobLatestMetricsResponse: + """ + @summary 获取TrainingJob最近的Metrics + + @param request: GetTrainingJobLatestMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: 
GetTrainingJobLatestMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.names): @@ -5895,6 +7169,12 @@ def get_training_job_latest_metrics( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobLatestMetricsRequest, ) -> pai_studio_20220112_models.GetTrainingJobLatestMetricsResponse: + """ + @summary 获取TrainingJob最近的Metrics + + @param request: GetTrainingJobLatestMetricsRequest + @return: GetTrainingJobLatestMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_training_job_latest_metrics_with_options(training_job_id, request, headers, runtime) @@ -5904,6 +7184,12 @@ async def get_training_job_latest_metrics_async( training_job_id: str, request: pai_studio_20220112_models.GetTrainingJobLatestMetricsRequest, ) -> pai_studio_20220112_models.GetTrainingJobLatestMetricsResponse: + """ + @summary 获取TrainingJob最近的Metrics + + @param request: GetTrainingJobLatestMetricsRequest + @return: GetTrainingJobLatestMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_training_job_latest_metrics_with_options_async(training_job_id, request, headers, runtime) @@ -5915,6 +7201,14 @@ def get_user_view_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetUserViewMetricsResponse: + """ + @summary get user view metrics + + @param request: GetUserViewMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: GetUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -5958,6 +7252,14 @@ async def get_user_view_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.GetUserViewMetricsResponse: + """ + @summary get user view metrics + + @param request: GetUserViewMetricsRequest + @param headers: map 
+ @param runtime: runtime options for this request RuntimeOptions + @return: GetUserViewMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.order): @@ -5999,6 +7301,12 @@ def get_user_view_metrics( resource_group_id: str, request: pai_studio_20220112_models.GetUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetUserViewMetricsResponse: + """ + @summary get user view metrics + + @param request: GetUserViewMetricsRequest + @return: GetUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.get_user_view_metrics_with_options(resource_group_id, request, headers, runtime) @@ -6008,6 +7316,12 @@ async def get_user_view_metrics_async( resource_group_id: str, request: pai_studio_20220112_models.GetUserViewMetricsRequest, ) -> pai_studio_20220112_models.GetUserViewMetricsResponse: + """ + @summary get user view metrics + + @param request: GetUserViewMetricsRequest + @return: GetUserViewMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.get_user_view_metrics_with_options_async(resource_group_id, request, headers, runtime) @@ -6018,6 +7332,14 @@ def list_ai4dserivces_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAI4DSerivcesResponse: + """ + @summary 获取AI4D服务列表 + + @param request: ListAI4DSerivcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAI4DSerivcesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.service_type): @@ -6050,6 +7372,14 @@ async def list_ai4dserivces_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAI4DSerivcesResponse: + """ + @summary 获取AI4D服务列表 + + @param request: ListAI4DSerivcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + 
@return: ListAI4DSerivcesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.service_type): @@ -6080,6 +7410,12 @@ def list_ai4dserivces( self, request: pai_studio_20220112_models.ListAI4DSerivcesRequest, ) -> pai_studio_20220112_models.ListAI4DSerivcesResponse: + """ + @summary 获取AI4D服务列表 + + @param request: ListAI4DSerivcesRequest + @return: ListAI4DSerivcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_ai4dserivces_with_options(request, headers, runtime) @@ -6088,6 +7424,12 @@ async def list_ai4dserivces_async( self, request: pai_studio_20220112_models.ListAI4DSerivcesRequest, ) -> pai_studio_20220112_models.ListAI4DSerivcesResponse: + """ + @summary 获取AI4D服务列表 + + @param request: ListAI4DSerivcesRequest + @return: ListAI4DSerivcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_ai4dserivces_with_options_async(request, headers, runtime) @@ -6098,6 +7440,14 @@ def list_ai4dservice_templates_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAI4DServiceTemplatesResponse: + """ + @summary 获取AI4D服务模板 + + @param request: ListAI4DServiceTemplatesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAI4DServiceTemplatesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.service_type): @@ -6130,6 +7480,14 @@ async def list_ai4dservice_templates_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAI4DServiceTemplatesResponse: + """ + @summary 获取AI4D服务模板 + + @param request: ListAI4DServiceTemplatesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAI4DServiceTemplatesResponse + """ UtilClient.validate_model(request) query = {} if not 
UtilClient.is_unset(request.service_type): @@ -6160,6 +7518,12 @@ def list_ai4dservice_templates( self, request: pai_studio_20220112_models.ListAI4DServiceTemplatesRequest, ) -> pai_studio_20220112_models.ListAI4DServiceTemplatesResponse: + """ + @summary 获取AI4D服务模板 + + @param request: ListAI4DServiceTemplatesRequest + @return: ListAI4DServiceTemplatesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_ai4dservice_templates_with_options(request, headers, runtime) @@ -6168,6 +7532,12 @@ async def list_ai4dservice_templates_async( self, request: pai_studio_20220112_models.ListAI4DServiceTemplatesRequest, ) -> pai_studio_20220112_models.ListAI4DServiceTemplatesResponse: + """ + @summary 获取AI4D服务模板 + + @param request: ListAI4DServiceTemplatesRequest + @return: ListAI4DServiceTemplatesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_ai4dservice_templates_with_options_async(request, headers, runtime) @@ -6179,6 +7549,14 @@ def list_algorithm_versions_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAlgorithmVersionsResponse: + """ + @summary 获取算法的所有版本信息 + + @param request: ListAlgorithmVersionsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAlgorithmVersionsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.page_number): @@ -6212,6 +7590,14 @@ async def list_algorithm_versions_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAlgorithmVersionsResponse: + """ + @summary 获取算法的所有版本信息 + + @param request: ListAlgorithmVersionsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAlgorithmVersionsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.page_number): @@ -6243,6 
+7629,12 @@ def list_algorithm_versions( algorithm_id: str, request: pai_studio_20220112_models.ListAlgorithmVersionsRequest, ) -> pai_studio_20220112_models.ListAlgorithmVersionsResponse: + """ + @summary 获取算法的所有版本信息 + + @param request: ListAlgorithmVersionsRequest + @return: ListAlgorithmVersionsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_algorithm_versions_with_options(algorithm_id, request, headers, runtime) @@ -6252,6 +7644,12 @@ async def list_algorithm_versions_async( algorithm_id: str, request: pai_studio_20220112_models.ListAlgorithmVersionsRequest, ) -> pai_studio_20220112_models.ListAlgorithmVersionsResponse: + """ + @summary 获取算法的所有版本信息 + + @param request: ListAlgorithmVersionsRequest + @return: ListAlgorithmVersionsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_algorithm_versions_with_options_async(algorithm_id, request, headers, runtime) @@ -6262,6 +7660,14 @@ def list_algorithms_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAlgorithmsResponse: + """ + @summary 获取算法列表 + + @param request: ListAlgorithmsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAlgorithmsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.algorithm_id): @@ -6302,6 +7708,14 @@ async def list_algorithms_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListAlgorithmsResponse: + """ + @summary 获取算法列表 + + @param request: ListAlgorithmsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListAlgorithmsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.algorithm_id): @@ -6340,6 +7754,12 @@ def list_algorithms( self, request: pai_studio_20220112_models.ListAlgorithmsRequest, ) 
-> pai_studio_20220112_models.ListAlgorithmsResponse: + """ + @summary 获取算法列表 + + @param request: ListAlgorithmsRequest + @return: ListAlgorithmsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_algorithms_with_options(request, headers, runtime) @@ -6348,6 +7768,12 @@ async def list_algorithms_async( self, request: pai_studio_20220112_models.ListAlgorithmsRequest, ) -> pai_studio_20220112_models.ListAlgorithmsResponse: + """ + @summary 获取算法列表 + + @param request: ListAlgorithmsRequest + @return: ListAlgorithmsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_algorithms_with_options_async(request, headers, runtime) @@ -6358,6 +7784,14 @@ def list_component_version_snapshots_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentVersionSnapshotsResponse: + """ + @summary 更新组件版本快照 + + @param request: ListComponentVersionSnapshotsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentVersionSnapshotsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.component_id): @@ -6400,6 +7834,14 @@ async def list_component_version_snapshots_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentVersionSnapshotsResponse: + """ + @summary 更新组件版本快照 + + @param request: ListComponentVersionSnapshotsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentVersionSnapshotsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.component_id): @@ -6440,6 +7882,12 @@ def list_component_version_snapshots( self, request: pai_studio_20220112_models.ListComponentVersionSnapshotsRequest, ) -> pai_studio_20220112_models.ListComponentVersionSnapshotsResponse: + """ + @summary 
更新组件版本快照 + + @param request: ListComponentVersionSnapshotsRequest + @return: ListComponentVersionSnapshotsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_component_version_snapshots_with_options(request, headers, runtime) @@ -6448,6 +7896,12 @@ async def list_component_version_snapshots_async( self, request: pai_studio_20220112_models.ListComponentVersionSnapshotsRequest, ) -> pai_studio_20220112_models.ListComponentVersionSnapshotsResponse: + """ + @summary 更新组件版本快照 + + @param request: ListComponentVersionSnapshotsRequest + @return: ListComponentVersionSnapshotsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_component_version_snapshots_with_options_async(request, headers, runtime) @@ -6459,6 +7913,14 @@ def list_component_versions_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentVersionsResponse: + """ + @summary 获取组件版本列表 + + @param tmp_req: ListComponentVersionsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentVersionsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListComponentVersionsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -6504,6 +7966,14 @@ async def list_component_versions_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentVersionsResponse: + """ + @summary 获取组件版本列表 + + @param tmp_req: ListComponentVersionsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentVersionsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListComponentVersionsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -6547,6 +8017,12 @@ def list_component_versions( component_id: str, request: 
pai_studio_20220112_models.ListComponentVersionsRequest, ) -> pai_studio_20220112_models.ListComponentVersionsResponse: + """ + @summary 获取组件版本列表 + + @param request: ListComponentVersionsRequest + @return: ListComponentVersionsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_component_versions_with_options(component_id, request, headers, runtime) @@ -6556,6 +8032,12 @@ async def list_component_versions_async( component_id: str, request: pai_studio_20220112_models.ListComponentVersionsRequest, ) -> pai_studio_20220112_models.ListComponentVersionsResponse: + """ + @summary 获取组件版本列表 + + @param request: ListComponentVersionsRequest + @return: ListComponentVersionsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_component_versions_with_options_async(component_id, request, headers, runtime) @@ -6566,6 +8048,14 @@ def list_components_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentsResponse: + """ + @summary 获取组件列表 + + @param tmp_req: ListComponentsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListComponentsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -6618,6 +8108,14 @@ async def list_components_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListComponentsResponse: + """ + @summary 获取组件列表 + + @param tmp_req: ListComponentsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListComponentsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListComponentsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -6668,6 +8166,12 @@ def list_components( self, request: 
pai_studio_20220112_models.ListComponentsRequest, ) -> pai_studio_20220112_models.ListComponentsResponse: + """ + @summary 获取组件列表 + + @param request: ListComponentsRequest + @return: ListComponentsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_components_with_options(request, headers, runtime) @@ -6676,6 +8180,12 @@ async def list_components_async( self, request: pai_studio_20220112_models.ListComponentsRequest, ) -> pai_studio_20220112_models.ListComponentsResponse: + """ + @summary 获取组件列表 + + @param request: ListComponentsRequest + @return: ListComponentsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_components_with_options_async(request, headers, runtime) @@ -6686,6 +8196,14 @@ def list_instance_jobs_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListInstanceJobsResponse: + """ + @summary 获取实例任务列表 + + @param request: ListInstanceJobsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListInstanceJobsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.instance_job_type): @@ -6726,6 +8244,14 @@ async def list_instance_jobs_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListInstanceJobsResponse: + """ + @summary 获取实例任务列表 + + @param request: ListInstanceJobsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListInstanceJobsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.instance_job_type): @@ -6764,6 +8290,12 @@ def list_instance_jobs( self, request: pai_studio_20220112_models.ListInstanceJobsRequest, ) -> pai_studio_20220112_models.ListInstanceJobsResponse: + """ + @summary 获取实例任务列表 + + @param request: ListInstanceJobsRequest + @return: ListInstanceJobsResponse 
+ """ runtime = util_models.RuntimeOptions() headers = {} return self.list_instance_jobs_with_options(request, headers, runtime) @@ -6772,132 +8304,52 @@ async def list_instance_jobs_async( self, request: pai_studio_20220112_models.ListInstanceJobsRequest, ) -> pai_studio_20220112_models.ListInstanceJobsResponse: + """ + @summary 获取实例任务列表 + + @param request: ListInstanceJobsRequest + @return: ListInstanceJobsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_instance_jobs_with_options_async(request, headers, runtime) - def list_llmprojects_with_options( - self, - request: pai_studio_20220112_models.ListLLMProjectsRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListLLMProjectsResponse: - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.order): - query['Order'] = request.order - if not UtilClient.is_unset(request.page_number): - query['PageNumber'] = request.page_number - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.project_name): - query['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.sort_by): - query['SortBy'] = request.sort_by - if not UtilClient.is_unset(request.workspace_id): - query['WorkspaceId'] = request.workspace_id - req = open_api_models.OpenApiRequest( - headers=headers, - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='ListLLMProjects', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects', - method='GET', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.ListLLMProjectsResponse(), - self.call_api(params, req, runtime) - ) - - async def list_llmprojects_with_options_async( + def list_node_gpumetrics_with_options( self, - request: 
pai_studio_20220112_models.ListLLMProjectsRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListLLMProjectsResponse: - UtilClient.validate_model(request) - query = {} - if not UtilClient.is_unset(request.order): - query['Order'] = request.order - if not UtilClient.is_unset(request.page_number): - query['PageNumber'] = request.page_number - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.project_name): - query['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.sort_by): - query['SortBy'] = request.sort_by - if not UtilClient.is_unset(request.workspace_id): - query['WorkspaceId'] = request.workspace_id - req = open_api_models.OpenApiRequest( - headers=headers, - query=OpenApiUtilClient.query(query) - ) - params = open_api_models.Params( - action='ListLLMProjects', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects', - method='GET', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.ListLLMProjectsResponse(), - await self.call_api_async(params, req, runtime) - ) - - def list_llmprojects( - self, - request: pai_studio_20220112_models.ListLLMProjectsRequest, - ) -> pai_studio_20220112_models.ListLLMProjectsResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.list_llmprojects_with_options(request, headers, runtime) - - async def list_llmprojects_async( - self, - request: pai_studio_20220112_models.ListLLMProjectsRequest, - ) -> pai_studio_20220112_models.ListLLMProjectsResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.list_llmprojects_with_options_async(request, headers, runtime) - - def list_llmsnapshots_with_options( - self, - project_id: str, - request: pai_studio_20220112_models.ListLLMSnapshotsRequest, + quota_id: str, + request: 
pai_studio_20220112_models.ListNodeGPUMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListLLMSnapshotsResponse: + ) -> pai_studio_20220112_models.ListNodeGPUMetricsResponse: + """ + @summary 查询某资源配额下所有节点的性能指标列表 + + @param request: ListNodeGPUMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodeGPUMetricsResponse + """ UtilClient.validate_model(request) query = {} - if not UtilClient.is_unset(request.order): - query['Order'] = request.order - if not UtilClient.is_unset(request.page_number): - query['PageNumber'] = request.page_number - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.sort_by): - query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.gputype): + query['GPUType'] = request.gputype + if not UtilClient.is_unset(request.metric_type): + query['MetricType'] = request.metric_type + if not UtilClient.is_unset(request.node_type): + query['NodeType'] = request.node_type + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = open_api_models.OpenApiRequest( headers=headers, query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='ListLLMSnapshots', + action='ListNodeGPUMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/nodegpumetrics', method='GET', auth_type='AK', style='ROA', @@ -6905,36 +8357,46 @@ def list_llmsnapshots_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListLLMSnapshotsResponse(), + pai_studio_20220112_models.ListNodeGPUMetricsResponse(), self.call_api(params, 
req, runtime) ) - async def list_llmsnapshots_with_options_async( + async def list_node_gpumetrics_with_options_async( self, - project_id: str, - request: pai_studio_20220112_models.ListLLMSnapshotsRequest, + quota_id: str, + request: pai_studio_20220112_models.ListNodeGPUMetricsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListLLMSnapshotsResponse: + ) -> pai_studio_20220112_models.ListNodeGPUMetricsResponse: + """ + @summary 查询某资源配额下所有节点的性能指标列表 + + @param request: ListNodeGPUMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodeGPUMetricsResponse + """ UtilClient.validate_model(request) query = {} - if not UtilClient.is_unset(request.order): - query['Order'] = request.order - if not UtilClient.is_unset(request.page_number): - query['PageNumber'] = request.page_number - if not UtilClient.is_unset(request.page_size): - query['PageSize'] = request.page_size - if not UtilClient.is_unset(request.sort_by): - query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.end_time): + query['EndTime'] = request.end_time + if not UtilClient.is_unset(request.gputype): + query['GPUType'] = request.gputype + if not UtilClient.is_unset(request.metric_type): + query['MetricType'] = request.metric_type + if not UtilClient.is_unset(request.node_type): + query['NodeType'] = request.node_type + if not UtilClient.is_unset(request.start_time): + query['StartTime'] = request.start_time req = open_api_models.OpenApiRequest( headers=headers, query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='ListLLMSnapshots', + action='ListNodeGPUMetrics', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}/snapshots', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/nodegpumetrics', method='GET', auth_type='AK', style='ROA', @@ 
-6942,27 +8404,39 @@ async def list_llmsnapshots_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListLLMSnapshotsResponse(), + pai_studio_20220112_models.ListNodeGPUMetricsResponse(), await self.call_api_async(params, req, runtime) ) - def list_llmsnapshots( + def list_node_gpumetrics( self, - project_id: str, - request: pai_studio_20220112_models.ListLLMSnapshotsRequest, - ) -> pai_studio_20220112_models.ListLLMSnapshotsResponse: + quota_id: str, + request: pai_studio_20220112_models.ListNodeGPUMetricsRequest, + ) -> pai_studio_20220112_models.ListNodeGPUMetricsResponse: + """ + @summary 查询某资源配额下所有节点的性能指标列表 + + @param request: ListNodeGPUMetricsRequest + @return: ListNodeGPUMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.list_llmsnapshots_with_options(project_id, request, headers, runtime) + return self.list_node_gpumetrics_with_options(quota_id, request, headers, runtime) - async def list_llmsnapshots_async( + async def list_node_gpumetrics_async( self, - project_id: str, - request: pai_studio_20220112_models.ListLLMSnapshotsRequest, - ) -> pai_studio_20220112_models.ListLLMSnapshotsResponse: + quota_id: str, + request: pai_studio_20220112_models.ListNodeGPUMetricsRequest, + ) -> pai_studio_20220112_models.ListNodeGPUMetricsResponse: + """ + @summary 查询某资源配额下所有节点的性能指标列表 + + @param request: ListNodeGPUMetricsRequest + @return: ListNodeGPUMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.list_llmsnapshots_with_options_async(project_id, request, headers, runtime) + return await self.list_node_gpumetrics_with_options_async(quota_id, request, headers, runtime) def list_node_pods_with_options( self, @@ -6971,6 +8445,14 @@ def list_node_pods_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodePodsResponse: + """ + @summary 您可以通过ListNodePods得到节点上的Pod信息 + + @param request: 
ListNodePodsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodePodsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.resource_group_id): @@ -7002,6 +8484,14 @@ async def list_node_pods_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodePodsResponse: + """ + @summary 您可以通过ListNodePods得到节点上的Pod信息 + + @param request: ListNodePodsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodePodsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.resource_group_id): @@ -7031,6 +8521,12 @@ def list_node_pods( node_id: str, request: pai_studio_20220112_models.ListNodePodsRequest, ) -> pai_studio_20220112_models.ListNodePodsResponse: + """ + @summary 您可以通过ListNodePods得到节点上的Pod信息 + + @param request: ListNodePodsRequest + @return: ListNodePodsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_node_pods_with_options(node_id, request, headers, runtime) @@ -7040,6 +8536,12 @@ async def list_node_pods_async( node_id: str, request: pai_studio_20220112_models.ListNodePodsRequest, ) -> pai_studio_20220112_models.ListNodePodsResponse: + """ + @summary 您可以通过ListNodePods得到节点上的Pod信息 + + @param request: ListNodePodsRequest + @return: ListNodePodsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_node_pods_with_options_async(node_id, request, headers, runtime) @@ -7050,6 +8552,14 @@ def list_node_types_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodeTypesResponse: + """ + @summary 获取节点规格列表 + + @param request: ListNodeTypesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodeTypesResponse + """ 
UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.accelerator_type): @@ -7088,6 +8598,14 @@ async def list_node_types_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodeTypesResponse: + """ + @summary 获取节点规格列表 + + @param request: ListNodeTypesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodeTypesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.accelerator_type): @@ -7124,6 +8642,12 @@ def list_node_types( self, request: pai_studio_20220112_models.ListNodeTypesRequest, ) -> pai_studio_20220112_models.ListNodeTypesResponse: + """ + @summary 获取节点规格列表 + + @param request: ListNodeTypesRequest + @return: ListNodeTypesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_node_types_with_options(request, headers, runtime) @@ -7132,6 +8656,12 @@ async def list_node_types_async( self, request: pai_studio_20220112_models.ListNodeTypesRequest, ) -> pai_studio_20220112_models.ListNodeTypesResponse: + """ + @summary 获取节点规格列表 + + @param request: ListNodeTypesRequest + @return: ListNodeTypesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_node_types_with_options_async(request, headers, runtime) @@ -7142,12 +8672,26 @@ def list_nodes_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodesResponse: + """ + @summary 获取资源节点列表 + + @param request: ListNodesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.accelerator_type): query['AcceleratorType'] = request.accelerator_type + if not UtilClient.is_unset(request.filter_by_quota_id): + query['FilterByQuotaId'] = request.filter_by_quota_id 
+ if not UtilClient.is_unset(request.filter_by_resource_group_ids): + query['FilterByResourceGroupIds'] = request.filter_by_resource_group_ids if not UtilClient.is_unset(request.gputype): query['GPUType'] = request.gputype + if not UtilClient.is_unset(request.node_names): + query['NodeNames'] = request.node_names if not UtilClient.is_unset(request.node_statuses): query['NodeStatuses'] = request.node_statuses if not UtilClient.is_unset(request.node_types): @@ -7194,12 +8738,26 @@ async def list_nodes_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListNodesResponse: + """ + @summary 获取资源节点列表 + + @param request: ListNodesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListNodesResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.accelerator_type): query['AcceleratorType'] = request.accelerator_type + if not UtilClient.is_unset(request.filter_by_quota_id): + query['FilterByQuotaId'] = request.filter_by_quota_id + if not UtilClient.is_unset(request.filter_by_resource_group_ids): + query['FilterByResourceGroupIds'] = request.filter_by_resource_group_ids if not UtilClient.is_unset(request.gputype): query['GPUType'] = request.gputype + if not UtilClient.is_unset(request.node_names): + query['NodeNames'] = request.node_names if not UtilClient.is_unset(request.node_statuses): query['NodeStatuses'] = request.node_statuses if not UtilClient.is_unset(request.node_types): @@ -7244,6 +8802,12 @@ def list_nodes( self, request: pai_studio_20220112_models.ListNodesRequest, ) -> pai_studio_20220112_models.ListNodesResponse: + """ + @summary 获取资源节点列表 + + @param request: ListNodesRequest + @return: ListNodesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_nodes_with_options(request, headers, runtime) @@ -7252,45 +8816,276 @@ async def list_nodes_async( self, request: 
pai_studio_20220112_models.ListNodesRequest, ) -> pai_studio_20220112_models.ListNodesResponse: + """ + @summary 获取资源节点列表 + + @param request: ListNodesRequest + @return: ListNodesResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return await self.list_nodes_with_options_async(request, headers, runtime) + + def list_operations_with_options( + self, + request: pai_studio_20220112_models.ListOperationsRequest, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ListOperationsResponse: + """ + @summary 获取资源变更列表 + + @param request: ListOperationsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListOperationsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.object_id): + query['ObjectId'] = request.object_id + if not UtilClient.is_unset(request.object_type): + query['ObjectType'] = request.object_type + if not UtilClient.is_unset(request.operation_id): + query['OperationId'] = request.operation_id + if not UtilClient.is_unset(request.operation_type): + query['OperationType'] = request.operation_type + if not UtilClient.is_unset(request.order): + query['Order'] = request.order + if not UtilClient.is_unset(request.page_number): + query['PageNumber'] = request.page_number + if not UtilClient.is_unset(request.page_size): + query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.sort_by): + query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.status): + query['Status'] = request.status + req = open_api_models.OpenApiRequest( + headers=headers, + query=OpenApiUtilClient.query(query) + ) + params = open_api_models.Params( + action='ListOperations', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/operations', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + 
pai_studio_20220112_models.ListOperationsResponse(), + self.call_api(params, req, runtime) + ) + + async def list_operations_with_options_async( + self, + request: pai_studio_20220112_models.ListOperationsRequest, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ListOperationsResponse: + """ + @summary 获取资源变更列表 + + @param request: ListOperationsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListOperationsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.object_id): + query['ObjectId'] = request.object_id + if not UtilClient.is_unset(request.object_type): + query['ObjectType'] = request.object_type + if not UtilClient.is_unset(request.operation_id): + query['OperationId'] = request.operation_id + if not UtilClient.is_unset(request.operation_type): + query['OperationType'] = request.operation_type + if not UtilClient.is_unset(request.order): + query['Order'] = request.order + if not UtilClient.is_unset(request.page_number): + query['PageNumber'] = request.page_number + if not UtilClient.is_unset(request.page_size): + query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.sort_by): + query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.status): + query['Status'] = request.status + req = open_api_models.OpenApiRequest( + headers=headers, + query=OpenApiUtilClient.query(query) + ) + params = open_api_models.Params( + action='ListOperations', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/operations', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.ListOperationsResponse(), + await self.call_api_async(params, req, runtime) + ) + + def list_operations( + self, + request: pai_studio_20220112_models.ListOperationsRequest, + ) -> 
pai_studio_20220112_models.ListOperationsResponse: + """ + @summary 获取资源变更列表 + + @param request: ListOperationsRequest + @return: ListOperationsResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return self.list_operations_with_options(request, headers, runtime) + + async def list_operations_async( + self, + request: pai_studio_20220112_models.ListOperationsRequest, + ) -> pai_studio_20220112_models.ListOperationsResponse: + """ + @summary 获取资源变更列表 + + @param request: ListOperationsRequest + @return: ListOperationsResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return await self.list_operations_with_options_async(request, headers, runtime) + + def list_permissions_with_options( + self, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ListPermissionsResponse: + """ + @summary ListPermissions + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListPermissionsResponse + """ + req = open_api_models.OpenApiRequest( + headers=headers + ) + params = open_api_models.Params( + action='ListPermissions', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/permissions', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.ListPermissionsResponse(), + self.call_api(params, req, runtime) + ) + + async def list_permissions_with_options_async( + self, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ListPermissionsResponse: + """ + @summary ListPermissions + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListPermissionsResponse + """ + req = open_api_models.OpenApiRequest( + headers=headers + ) + params = open_api_models.Params( + action='ListPermissions', + version='2022-01-12', + protocol='HTTPS', + 
pathname=f'/api/v1/permissions', + method='GET', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.ListPermissionsResponse(), + await self.call_api_async(params, req, runtime) + ) + + def list_permissions(self) -> pai_studio_20220112_models.ListPermissionsResponse: + """ + @summary ListPermissions + + @return: ListPermissionsResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return self.list_permissions_with_options(headers, runtime) + + async def list_permissions_async(self) -> pai_studio_20220112_models.ListPermissionsResponse: + """ + @summary ListPermissions + + @return: ListPermissionsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.list_nodes_with_options_async(request, headers, runtime) + return await self.list_permissions_with_options_async(headers, runtime) - def list_operations_with_options( + def list_quota_users_with_options( self, - request: pai_studio_20220112_models.ListOperationsRequest, + quota_id: str, + request: pai_studio_20220112_models.ListQuotaUsersRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListOperationsResponse: + ) -> pai_studio_20220112_models.ListQuotaUsersResponse: + """ + @summary 获取当前资源配额用户列表和其所使用的资源 + + @param request: ListQuotaUsersRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListQuotaUsersResponse + """ UtilClient.validate_model(request) query = {} - if not UtilClient.is_unset(request.object_id): - query['ObjectId'] = request.object_id - if not UtilClient.is_unset(request.object_type): - query['ObjectType'] = request.object_type - if not UtilClient.is_unset(request.operation_id): - query['OperationId'] = request.operation_id - if not UtilClient.is_unset(request.operation_type): - query['OperationType'] = request.operation_type if not 
UtilClient.is_unset(request.order): query['Order'] = request.order if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.self_only): + query['SelfOnly'] = request.self_only if not UtilClient.is_unset(request.sort_by): query['SortBy'] = request.sort_by - if not UtilClient.is_unset(request.status): - query['Status'] = request.status + if not UtilClient.is_unset(request.user_id): + query['UserId'] = request.user_id + if not UtilClient.is_unset(request.username): + query['Username'] = request.username + if not UtilClient.is_unset(request.workspace_id): + query['WorkspaceId'] = request.workspace_id req = open_api_models.OpenApiRequest( headers=headers, query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='ListOperations', + action='ListQuotaUsers', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/operations', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/users', method='GET', auth_type='AK', style='ROA', @@ -7298,45 +9093,52 @@ def list_operations_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListOperationsResponse(), + pai_studio_20220112_models.ListQuotaUsersResponse(), self.call_api(params, req, runtime) ) - async def list_operations_with_options_async( + async def list_quota_users_with_options_async( self, - request: pai_studio_20220112_models.ListOperationsRequest, + quota_id: str, + request: pai_studio_20220112_models.ListQuotaUsersRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListOperationsResponse: + ) -> pai_studio_20220112_models.ListQuotaUsersResponse: + """ + @summary 获取当前资源配额用户列表和其所使用的资源 + + @param request: ListQuotaUsersRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: 
ListQuotaUsersResponse + """ UtilClient.validate_model(request) query = {} - if not UtilClient.is_unset(request.object_id): - query['ObjectId'] = request.object_id - if not UtilClient.is_unset(request.object_type): - query['ObjectType'] = request.object_type - if not UtilClient.is_unset(request.operation_id): - query['OperationId'] = request.operation_id - if not UtilClient.is_unset(request.operation_type): - query['OperationType'] = request.operation_type if not UtilClient.is_unset(request.order): query['Order'] = request.order if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.self_only): + query['SelfOnly'] = request.self_only if not UtilClient.is_unset(request.sort_by): query['SortBy'] = request.sort_by - if not UtilClient.is_unset(request.status): - query['Status'] = request.status + if not UtilClient.is_unset(request.user_id): + query['UserId'] = request.user_id + if not UtilClient.is_unset(request.username): + query['Username'] = request.username + if not UtilClient.is_unset(request.workspace_id): + query['WorkspaceId'] = request.workspace_id req = open_api_models.OpenApiRequest( headers=headers, query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='ListOperations', + action='ListQuotaUsers', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/operations', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/users', method='GET', auth_type='AK', style='ROA', @@ -7344,39 +9146,94 @@ async def list_operations_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListOperationsResponse(), + pai_studio_20220112_models.ListQuotaUsersResponse(), await self.call_api_async(params, req, runtime) ) - def list_operations( + def list_quota_users( self, - request: pai_studio_20220112_models.ListOperationsRequest, - 
) -> pai_studio_20220112_models.ListOperationsResponse: + quota_id: str, + request: pai_studio_20220112_models.ListQuotaUsersRequest, + ) -> pai_studio_20220112_models.ListQuotaUsersResponse: + """ + @summary 获取当前资源配额用户列表和其所使用的资源 + + @param request: ListQuotaUsersRequest + @return: ListQuotaUsersResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.list_operations_with_options(request, headers, runtime) + return self.list_quota_users_with_options(quota_id, request, headers, runtime) - async def list_operations_async( + async def list_quota_users_async( self, - request: pai_studio_20220112_models.ListOperationsRequest, - ) -> pai_studio_20220112_models.ListOperationsResponse: + quota_id: str, + request: pai_studio_20220112_models.ListQuotaUsersRequest, + ) -> pai_studio_20220112_models.ListQuotaUsersResponse: + """ + @summary 获取当前资源配额用户列表和其所使用的资源 + + @param request: ListQuotaUsersRequest + @return: ListQuotaUsersResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.list_operations_with_options_async(request, headers, runtime) + return await self.list_quota_users_with_options_async(quota_id, request, headers, runtime) - def list_permissions_with_options( + def list_quota_workloads_with_options( self, + quota_id: str, + request: pai_studio_20220112_models.ListQuotaWorkloadsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListPermissionsResponse: + ) -> pai_studio_20220112_models.ListQuotaWorkloadsResponse: + """ + @summary 您可以通过此API获取Quota上的任务信息 + + @param request: ListQuotaWorkloadsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListQuotaWorkloadsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.before_workload_id): + query['BeforeWorkloadId'] = request.before_workload_id + if not UtilClient.is_unset(request.node_name): + query['NodeName'] = 
request.node_name + if not UtilClient.is_unset(request.order): + query['Order'] = request.order + if not UtilClient.is_unset(request.page_number): + query['PageNumber'] = request.page_number + if not UtilClient.is_unset(request.page_size): + query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.show_own): + query['ShowOwn'] = request.show_own + if not UtilClient.is_unset(request.sort_by): + query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.status): + query['Status'] = request.status + if not UtilClient.is_unset(request.sub_quota_ids): + query['SubQuotaIds'] = request.sub_quota_ids + if not UtilClient.is_unset(request.user_ids): + query['UserIds'] = request.user_ids + if not UtilClient.is_unset(request.workload_created_time_range): + query['WorkloadCreatedTimeRange'] = request.workload_created_time_range + if not UtilClient.is_unset(request.workload_ids): + query['WorkloadIds'] = request.workload_ids + if not UtilClient.is_unset(request.workload_type): + query['WorkloadType'] = request.workload_type + if not UtilClient.is_unset(request.workspace_ids): + query['WorkspaceIds'] = request.workspace_ids req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - action='ListPermissions', + action='ListQuotaWorkloads', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/permissions', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/workloads', method='GET', auth_type='AK', style='ROA', @@ -7384,23 +9241,64 @@ def list_permissions_with_options( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListPermissionsResponse(), + pai_studio_20220112_models.ListQuotaWorkloadsResponse(), self.call_api(params, req, runtime) ) - async def list_permissions_with_options_async( + async def list_quota_workloads_with_options_async( self, + quota_id: str, + request: 
pai_studio_20220112_models.ListQuotaWorkloadsRequest, headers: Dict[str, str], runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.ListPermissionsResponse: + ) -> pai_studio_20220112_models.ListQuotaWorkloadsResponse: + """ + @summary 您可以通过此API获取Quota上的任务信息 + + @param request: ListQuotaWorkloadsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListQuotaWorkloadsResponse + """ + UtilClient.validate_model(request) + query = {} + if not UtilClient.is_unset(request.before_workload_id): + query['BeforeWorkloadId'] = request.before_workload_id + if not UtilClient.is_unset(request.node_name): + query['NodeName'] = request.node_name + if not UtilClient.is_unset(request.order): + query['Order'] = request.order + if not UtilClient.is_unset(request.page_number): + query['PageNumber'] = request.page_number + if not UtilClient.is_unset(request.page_size): + query['PageSize'] = request.page_size + if not UtilClient.is_unset(request.show_own): + query['ShowOwn'] = request.show_own + if not UtilClient.is_unset(request.sort_by): + query['SortBy'] = request.sort_by + if not UtilClient.is_unset(request.status): + query['Status'] = request.status + if not UtilClient.is_unset(request.sub_quota_ids): + query['SubQuotaIds'] = request.sub_quota_ids + if not UtilClient.is_unset(request.user_ids): + query['UserIds'] = request.user_ids + if not UtilClient.is_unset(request.workload_created_time_range): + query['WorkloadCreatedTimeRange'] = request.workload_created_time_range + if not UtilClient.is_unset(request.workload_ids): + query['WorkloadIds'] = request.workload_ids + if not UtilClient.is_unset(request.workload_type): + query['WorkloadType'] = request.workload_type + if not UtilClient.is_unset(request.workspace_ids): + query['WorkspaceIds'] = request.workspace_ids req = open_api_models.OpenApiRequest( - headers=headers + headers=headers, + query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( - 
action='ListPermissions', + action='ListQuotaWorkloads', version='2022-01-12', protocol='HTTPS', - pathname=f'/api/v1/permissions', + pathname=f'/api/v1/quotas/{OpenApiUtilClient.get_encode_param(quota_id)}/workloads', method='GET', auth_type='AK', style='ROA', @@ -7408,19 +9306,39 @@ async def list_permissions_with_options_async( body_type='json' ) return TeaCore.from_map( - pai_studio_20220112_models.ListPermissionsResponse(), + pai_studio_20220112_models.ListQuotaWorkloadsResponse(), await self.call_api_async(params, req, runtime) ) - def list_permissions(self) -> pai_studio_20220112_models.ListPermissionsResponse: + def list_quota_workloads( + self, + quota_id: str, + request: pai_studio_20220112_models.ListQuotaWorkloadsRequest, + ) -> pai_studio_20220112_models.ListQuotaWorkloadsResponse: + """ + @summary 您可以通过此API获取Quota上的任务信息 + + @param request: ListQuotaWorkloadsRequest + @return: ListQuotaWorkloadsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return self.list_permissions_with_options(headers, runtime) + return self.list_quota_workloads_with_options(quota_id, request, headers, runtime) - async def list_permissions_async(self) -> pai_studio_20220112_models.ListPermissionsResponse: + async def list_quota_workloads_async( + self, + quota_id: str, + request: pai_studio_20220112_models.ListQuotaWorkloadsRequest, + ) -> pai_studio_20220112_models.ListQuotaWorkloadsResponse: + """ + @summary 您可以通过此API获取Quota上的任务信息 + + @param request: ListQuotaWorkloadsRequest + @return: ListQuotaWorkloadsResponse + """ runtime = util_models.RuntimeOptions() headers = {} - return await self.list_permissions_with_options_async(headers, runtime) + return await self.list_quota_workloads_with_options_async(quota_id, request, headers, runtime) def list_quotas_with_options( self, @@ -7428,6 +9346,14 @@ def list_quotas_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListQuotasResponse: + """ + @summary 
获取Quota列表 + + @param request: ListQuotasRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListQuotasResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.labels): @@ -7452,6 +9378,8 @@ def list_quotas_with_options( query['SortBy'] = request.sort_by if not UtilClient.is_unset(request.statuses): query['Statuses'] = request.statuses + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose if not UtilClient.is_unset(request.workspace_ids): query['WorkspaceIds'] = request.workspace_ids req = open_api_models.OpenApiRequest( @@ -7480,6 +9408,14 @@ async def list_quotas_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListQuotasResponse: + """ + @summary 获取Quota列表 + + @param request: ListQuotasRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListQuotasResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.labels): @@ -7504,6 +9440,8 @@ async def list_quotas_with_options_async( query['SortBy'] = request.sort_by if not UtilClient.is_unset(request.statuses): query['Statuses'] = request.statuses + if not UtilClient.is_unset(request.verbose): + query['Verbose'] = request.verbose if not UtilClient.is_unset(request.workspace_ids): query['WorkspaceIds'] = request.workspace_ids req = open_api_models.OpenApiRequest( @@ -7530,6 +9468,12 @@ def list_quotas( self, request: pai_studio_20220112_models.ListQuotasRequest, ) -> pai_studio_20220112_models.ListQuotasResponse: + """ + @summary 获取Quota列表 + + @param request: ListQuotasRequest + @return: ListQuotasResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_quotas_with_options(request, headers, runtime) @@ -7538,6 +9482,12 @@ async def list_quotas_async( self, request: pai_studio_20220112_models.ListQuotasRequest, ) -> 
pai_studio_20220112_models.ListQuotasResponse: + """ + @summary 获取Quota列表 + + @param request: ListQuotasRequest + @return: ListQuotasResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_quotas_with_options_async(request, headers, runtime) @@ -7549,6 +9499,14 @@ def list_resource_group_machine_groups_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListResourceGroupMachineGroupsResponse: + """ + @summary list machine groups + + @param request: ListResourceGroupMachineGroupsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListResourceGroupMachineGroupsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.creator_id): @@ -7559,6 +9517,8 @@ def list_resource_group_machine_groups_with_options( query['Name'] = request.name if not UtilClient.is_unset(request.order): query['Order'] = request.order + if not UtilClient.is_unset(request.order_instance_id): + query['OrderInstanceId'] = request.order_instance_id if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): @@ -7600,6 +9560,14 @@ async def list_resource_group_machine_groups_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListResourceGroupMachineGroupsResponse: + """ + @summary list machine groups + + @param request: ListResourceGroupMachineGroupsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListResourceGroupMachineGroupsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.creator_id): @@ -7610,6 +9578,8 @@ async def list_resource_group_machine_groups_with_options_async( query['Name'] = request.name if not UtilClient.is_unset(request.order): query['Order'] = request.order + if 
not UtilClient.is_unset(request.order_instance_id): + query['OrderInstanceId'] = request.order_instance_id if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): @@ -7649,6 +9619,12 @@ def list_resource_group_machine_groups( resource_group_id: str, request: pai_studio_20220112_models.ListResourceGroupMachineGroupsRequest, ) -> pai_studio_20220112_models.ListResourceGroupMachineGroupsResponse: + """ + @summary list machine groups + + @param request: ListResourceGroupMachineGroupsRequest + @return: ListResourceGroupMachineGroupsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_resource_group_machine_groups_with_options(resource_group_id, request, headers, runtime) @@ -7658,6 +9634,12 @@ async def list_resource_group_machine_groups_async( resource_group_id: str, request: pai_studio_20220112_models.ListResourceGroupMachineGroupsRequest, ) -> pai_studio_20220112_models.ListResourceGroupMachineGroupsResponse: + """ + @summary list machine groups + + @param request: ListResourceGroupMachineGroupsRequest + @return: ListResourceGroupMachineGroupsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_resource_group_machine_groups_with_options_async(resource_group_id, request, headers, runtime) @@ -7668,6 +9650,14 @@ def list_resource_groups_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListResourceGroupsResponse: + """ + @summary list resource group + + @param request: ListResourceGroupsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListResourceGroupsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.computing_resource_provider): @@ -7714,6 +9704,14 @@ async def list_resource_groups_with_options_async( headers: Dict[str, str], runtime: 
util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListResourceGroupsResponse: + """ + @summary list resource group + + @param request: ListResourceGroupsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListResourceGroupsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.computing_resource_provider): @@ -7758,6 +9756,12 @@ def list_resource_groups( self, request: pai_studio_20220112_models.ListResourceGroupsRequest, ) -> pai_studio_20220112_models.ListResourceGroupsResponse: + """ + @summary list resource group + + @param request: ListResourceGroupsRequest + @return: ListResourceGroupsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_resource_groups_with_options(request, headers, runtime) @@ -7766,6 +9770,12 @@ async def list_resource_groups_async( self, request: pai_studio_20220112_models.ListResourceGroupsRequest, ) -> pai_studio_20220112_models.ListResourceGroupsResponse: + """ + @summary list resource group + + @param request: ListResourceGroupsRequest + @return: ListResourceGroupsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_resource_groups_with_options_async(request, headers, runtime) @@ -7776,6 +9786,14 @@ def list_spots_stock_preview_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListSpotsStockPreviewResponse: + """ + @summary 获取多个抢占式实例的库存概览 + + @param request: ListSpotsStockPreviewRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListSpotsStockPreviewResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.instance_types): @@ -7806,6 +9824,14 @@ async def list_spots_stock_preview_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> 
pai_studio_20220112_models.ListSpotsStockPreviewResponse: + """ + @summary 获取多个抢占式实例的库存概览 + + @param request: ListSpotsStockPreviewRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListSpotsStockPreviewResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.instance_types): @@ -7834,6 +9860,12 @@ def list_spots_stock_preview( self, request: pai_studio_20220112_models.ListSpotsStockPreviewRequest, ) -> pai_studio_20220112_models.ListSpotsStockPreviewResponse: + """ + @summary 获取多个抢占式实例的库存概览 + + @param request: ListSpotsStockPreviewRequest + @return: ListSpotsStockPreviewResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_spots_stock_preview_with_options(request, headers, runtime) @@ -7842,6 +9874,12 @@ async def list_spots_stock_preview_async( self, request: pai_studio_20220112_models.ListSpotsStockPreviewRequest, ) -> pai_studio_20220112_models.ListSpotsStockPreviewResponse: + """ + @summary 获取多个抢占式实例的库存概览 + + @param request: ListSpotsStockPreviewRequest + @return: ListSpotsStockPreviewResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_spots_stock_preview_with_options_async(request, headers, runtime) @@ -7852,6 +9890,14 @@ def list_tag_resources_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTagResourcesResponse: + """ + @summary 查标签接口 + + @param tmp_req: ListTagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTagResourcesResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListTagResourcesShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -7896,6 +9942,14 @@ async def list_tag_resources_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> 
pai_studio_20220112_models.ListTagResourcesResponse: + """ + @summary 查标签接口 + + @param tmp_req: ListTagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTagResourcesResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListTagResourcesShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -7938,6 +9992,12 @@ def list_tag_resources( self, request: pai_studio_20220112_models.ListTagResourcesRequest, ) -> pai_studio_20220112_models.ListTagResourcesResponse: + """ + @summary 查标签接口 + + @param request: ListTagResourcesRequest + @return: ListTagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_tag_resources_with_options(request, headers, runtime) @@ -7946,6 +10006,12 @@ async def list_tag_resources_async( self, request: pai_studio_20220112_models.ListTagResourcesRequest, ) -> pai_studio_20220112_models.ListTagResourcesResponse: + """ + @summary 查标签接口 + + @param request: ListTagResourcesRequest + @return: ListTagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_tag_resources_with_options_async(request, headers, runtime) @@ -7957,6 +10023,14 @@ def list_training_job_events_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobEventsResponse: + """ + @summary 获取指定TrainingJob的事件。 + + @param request: ListTrainingJobEventsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobEventsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -7996,6 +10070,14 @@ async def list_training_job_events_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobEventsResponse: + """ + @summary 获取指定TrainingJob的事件。 + + @param request: 
ListTrainingJobEventsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobEventsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8033,6 +10115,12 @@ def list_training_job_events( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobEventsRequest, ) -> pai_studio_20220112_models.ListTrainingJobEventsResponse: + """ + @summary 获取指定TrainingJob的事件。 + + @param request: ListTrainingJobEventsRequest + @return: ListTrainingJobEventsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_events_with_options(training_job_id, request, headers, runtime) @@ -8042,6 +10130,12 @@ async def list_training_job_events_async( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobEventsRequest, ) -> pai_studio_20220112_models.ListTrainingJobEventsResponse: + """ + @summary 获取指定TrainingJob的事件。 + + @param request: ListTrainingJobEventsRequest + @return: ListTrainingJobEventsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_events_with_options_async(training_job_id, request, headers, runtime) @@ -8054,6 +10148,14 @@ def list_training_job_instance_events_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobInstanceEventsResponse: + """ + @summary 获取指定Instance(TrainingJob的运行单元)的日志。 + + @param request: ListTrainingJobInstanceEventsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobInstanceEventsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8094,6 +10196,14 @@ async def list_training_job_instance_events_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> 
pai_studio_20220112_models.ListTrainingJobInstanceEventsResponse: + """ + @summary 获取指定Instance(TrainingJob的运行单元)的日志。 + + @param request: ListTrainingJobInstanceEventsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobInstanceEventsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8132,6 +10242,12 @@ def list_training_job_instance_events( instance_id: str, request: pai_studio_20220112_models.ListTrainingJobInstanceEventsRequest, ) -> pai_studio_20220112_models.ListTrainingJobInstanceEventsResponse: + """ + @summary 获取指定Instance(TrainingJob的运行单元)的日志。 + + @param request: ListTrainingJobInstanceEventsRequest + @return: ListTrainingJobInstanceEventsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_instance_events_with_options(training_job_id, instance_id, request, headers, runtime) @@ -8142,6 +10258,12 @@ async def list_training_job_instance_events_async( instance_id: str, request: pai_studio_20220112_models.ListTrainingJobInstanceEventsRequest, ) -> pai_studio_20220112_models.ListTrainingJobInstanceEventsResponse: + """ + @summary 获取指定Instance(TrainingJob的运行单元)的日志。 + + @param request: ListTrainingJobInstanceEventsRequest + @return: ListTrainingJobInstanceEventsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_instance_events_with_options_async(training_job_id, instance_id, request, headers, runtime) @@ -8153,6 +10275,14 @@ def list_training_job_instance_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobInstanceMetricsResponse: + """ + @summary 获取Training Job实例的Metrics + + @param request: ListTrainingJobInstanceMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobInstanceMetricsResponse 
+ """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8194,6 +10324,14 @@ async def list_training_job_instance_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobInstanceMetricsResponse: + """ + @summary 获取Training Job实例的Metrics + + @param request: ListTrainingJobInstanceMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobInstanceMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8233,6 +10371,12 @@ def list_training_job_instance_metrics( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobInstanceMetricsRequest, ) -> pai_studio_20220112_models.ListTrainingJobInstanceMetricsResponse: + """ + @summary 获取Training Job实例的Metrics + + @param request: ListTrainingJobInstanceMetricsRequest + @return: ListTrainingJobInstanceMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_instance_metrics_with_options(training_job_id, request, headers, runtime) @@ -8242,6 +10386,12 @@ async def list_training_job_instance_metrics_async( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobInstanceMetricsRequest, ) -> pai_studio_20220112_models.ListTrainingJobInstanceMetricsResponse: + """ + @summary 获取Training Job实例的Metrics + + @param request: ListTrainingJobInstanceMetricsRequest + @return: ListTrainingJobInstanceMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_instance_metrics_with_options_async(training_job_id, request, headers, runtime) @@ -8253,6 +10403,14 @@ def list_training_job_logs_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobLogsResponse: + """ + @summary 获取Training Job的日志 + + 
@param request: ListTrainingJobLogsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobLogsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8296,6 +10454,14 @@ async def list_training_job_logs_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobLogsResponse: + """ + @summary 获取Training Job的日志 + + @param request: ListTrainingJobLogsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobLogsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8337,6 +10503,12 @@ def list_training_job_logs( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobLogsRequest, ) -> pai_studio_20220112_models.ListTrainingJobLogsResponse: + """ + @summary 获取Training Job的日志 + + @param request: ListTrainingJobLogsRequest + @return: ListTrainingJobLogsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_logs_with_options(training_job_id, request, headers, runtime) @@ -8346,6 +10518,12 @@ async def list_training_job_logs_async( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobLogsRequest, ) -> pai_studio_20220112_models.ListTrainingJobLogsResponse: + """ + @summary 获取Training Job的日志 + + @param request: ListTrainingJobLogsRequest + @return: ListTrainingJobLogsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_logs_with_options_async(training_job_id, request, headers, runtime) @@ -8357,6 +10535,14 @@ def list_training_job_metrics_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobMetricsResponse: + """ + @summary 获取Training Job的Metrics + + @param request: 
ListTrainingJobMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8400,6 +10586,14 @@ async def list_training_job_metrics_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobMetricsResponse: + """ + @summary 获取Training Job的Metrics + + @param request: ListTrainingJobMetricsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobMetricsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.end_time): @@ -8441,6 +10635,12 @@ def list_training_job_metrics( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobMetricsRequest, ) -> pai_studio_20220112_models.ListTrainingJobMetricsResponse: + """ + @summary 获取Training Job的Metrics + + @param request: ListTrainingJobMetricsRequest + @return: ListTrainingJobMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_metrics_with_options(training_job_id, request, headers, runtime) @@ -8450,6 +10650,12 @@ async def list_training_job_metrics_async( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobMetricsRequest, ) -> pai_studio_20220112_models.ListTrainingJobMetricsResponse: + """ + @summary 获取Training Job的Metrics + + @param request: ListTrainingJobMetricsRequest + @return: ListTrainingJobMetricsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_metrics_with_options_async(training_job_id, request, headers, runtime) @@ -8461,6 +10667,14 @@ def list_training_job_output_models_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobOutputModelsResponse: + 
""" + @summary 获取Training Job 产出的所有模型信息 + + @param request: ListTrainingJobOutputModelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobOutputModelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.token): @@ -8492,6 +10706,14 @@ async def list_training_job_output_models_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobOutputModelsResponse: + """ + @summary 获取Training Job 产出的所有模型信息 + + @param request: ListTrainingJobOutputModelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobOutputModelsResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.token): @@ -8521,6 +10743,12 @@ def list_training_job_output_models( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobOutputModelsRequest, ) -> pai_studio_20220112_models.ListTrainingJobOutputModelsResponse: + """ + @summary 获取Training Job 产出的所有模型信息 + + @param request: ListTrainingJobOutputModelsRequest + @return: ListTrainingJobOutputModelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_job_output_models_with_options(training_job_id, request, headers, runtime) @@ -8530,6 +10758,12 @@ async def list_training_job_output_models_async( training_job_id: str, request: pai_studio_20220112_models.ListTrainingJobOutputModelsRequest, ) -> pai_studio_20220112_models.ListTrainingJobOutputModelsResponse: + """ + @summary 获取Training Job 产出的所有模型信息 + + @param request: ListTrainingJobOutputModelsRequest + @return: ListTrainingJobOutputModelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_job_output_models_with_options_async(training_job_id, request, headers, runtime) @@ -8540,6 +10774,14 @@ def 
list_training_jobs_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobsResponse: + """ + @summary 获取TrainingJob的列表 + + @param tmp_req: ListTrainingJobsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListTrainingJobsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -8600,6 +10842,14 @@ async def list_training_jobs_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ListTrainingJobsResponse: + """ + @summary 获取TrainingJob的列表 + + @param tmp_req: ListTrainingJobsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ListTrainingJobsResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.ListTrainingJobsShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -8658,6 +10908,12 @@ def list_training_jobs( self, request: pai_studio_20220112_models.ListTrainingJobsRequest, ) -> pai_studio_20220112_models.ListTrainingJobsResponse: + """ + @summary 获取TrainingJob的列表 + + @param request: ListTrainingJobsRequest + @return: ListTrainingJobsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.list_training_jobs_with_options(request, headers, runtime) @@ -8666,6 +10922,12 @@ async def list_training_jobs_async( self, request: pai_studio_20220112_models.ListTrainingJobsRequest, ) -> pai_studio_20220112_models.ListTrainingJobsResponse: + """ + @summary 获取TrainingJob的列表 + + @param request: ListTrainingJobsRequest + @return: ListTrainingJobsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.list_training_jobs_with_options_async(request, headers, runtime) @@ -8677,6 +10939,14 @@ def operate_node_with_options( headers: Dict[str, str], 
runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.OperateNodeResponse: + """ + @summary 您可以通过OperateNode对节点进行操作 + + @param request: OperateNodeRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: OperateNodeResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.operation): @@ -8710,6 +10980,14 @@ async def operate_node_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.OperateNodeResponse: + """ + @summary 您可以通过OperateNode对节点进行操作 + + @param request: OperateNodeRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: OperateNodeResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.operation): @@ -8741,6 +11019,12 @@ def operate_node( node_id: str, request: pai_studio_20220112_models.OperateNodeRequest, ) -> pai_studio_20220112_models.OperateNodeResponse: + """ + @summary 您可以通过OperateNode对节点进行操作 + + @param request: OperateNodeRequest + @return: OperateNodeResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.operate_node_with_options(node_id, request, headers, runtime) @@ -8750,6 +11034,12 @@ async def operate_node_async( node_id: str, request: pai_studio_20220112_models.OperateNodeRequest, ) -> pai_studio_20220112_models.OperateNodeResponse: + """ + @summary 您可以通过OperateNode对节点进行操作 + + @param request: OperateNodeRequest + @return: OperateNodeResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.operate_node_with_options_async(node_id, request, headers, runtime) @@ -8761,6 +11051,14 @@ def release_algorithm_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ReleaseAlgorithmResponse: + """ + @summary 发布算法为公共算法 + + @param request: ReleaseAlgorithmRequest + @param headers: map + @param runtime: 
runtime options for this request RuntimeOptions + @return: ReleaseAlgorithmResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.target_algorithm_name): @@ -8794,6 +11092,14 @@ async def release_algorithm_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ReleaseAlgorithmResponse: + """ + @summary 发布算法为公共算法 + + @param request: ReleaseAlgorithmRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ReleaseAlgorithmResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.target_algorithm_name): @@ -8825,6 +11131,12 @@ def release_algorithm( algorithm_id: str, request: pai_studio_20220112_models.ReleaseAlgorithmRequest, ) -> pai_studio_20220112_models.ReleaseAlgorithmResponse: + """ + @summary 发布算法为公共算法 + + @param request: ReleaseAlgorithmRequest + @return: ReleaseAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.release_algorithm_with_options(algorithm_id, request, headers, runtime) @@ -8834,6 +11146,12 @@ async def release_algorithm_async( algorithm_id: str, request: pai_studio_20220112_models.ReleaseAlgorithmRequest, ) -> pai_studio_20220112_models.ReleaseAlgorithmResponse: + """ + @summary 发布算法为公共算法 + + @param request: ReleaseAlgorithmRequest + @return: ReleaseAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.release_algorithm_with_options_async(algorithm_id, request, headers, runtime) @@ -8846,6 +11164,14 @@ def release_algorithm_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ReleaseAlgorithmVersionResponse: + """ + @summary 发布公共算法版本 + + @param request: ReleaseAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ReleaseAlgorithmVersionResponse + """ 
UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.target_algorithm_name): @@ -8882,6 +11208,14 @@ async def release_algorithm_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ReleaseAlgorithmVersionResponse: + """ + @summary 发布公共算法版本 + + @param request: ReleaseAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ReleaseAlgorithmVersionResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.target_algorithm_name): @@ -8916,6 +11250,12 @@ def release_algorithm_version( algorithm_version: str, request: pai_studio_20220112_models.ReleaseAlgorithmVersionRequest, ) -> pai_studio_20220112_models.ReleaseAlgorithmVersionResponse: + """ + @summary 发布公共算法版本 + + @param request: ReleaseAlgorithmVersionRequest + @return: ReleaseAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.release_algorithm_version_with_options(algorithm_id, algorithm_version, request, headers, runtime) @@ -8926,10 +11266,110 @@ async def release_algorithm_version_async( algorithm_version: str, request: pai_studio_20220112_models.ReleaseAlgorithmVersionRequest, ) -> pai_studio_20220112_models.ReleaseAlgorithmVersionResponse: + """ + @summary 发布公共算法版本 + + @param request: ReleaseAlgorithmVersionRequest + @return: ReleaseAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.release_algorithm_version_with_options_async(algorithm_id, algorithm_version, request, headers, runtime) + def release_machine_group_with_options( + self, + resource_group_id: str, + machine_group_id: str, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ReleaseMachineGroupResponse: + """ + @summary 释放到期的机器组 + + @param headers: map + @param runtime: runtime options for this request 
RuntimeOptions + @return: ReleaseMachineGroupResponse + """ + req = open_api_models.OpenApiRequest( + headers=headers + ) + params = open_api_models.Params( + action='ReleaseMachineGroup', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/resources/{OpenApiUtilClient.get_encode_param(resource_group_id)}/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', + method='POST', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.ReleaseMachineGroupResponse(), + self.call_api(params, req, runtime) + ) + + async def release_machine_group_with_options_async( + self, + resource_group_id: str, + machine_group_id: str, + headers: Dict[str, str], + runtime: util_models.RuntimeOptions, + ) -> pai_studio_20220112_models.ReleaseMachineGroupResponse: + """ + @summary 释放到期的机器组 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ReleaseMachineGroupResponse + """ + req = open_api_models.OpenApiRequest( + headers=headers + ) + params = open_api_models.Params( + action='ReleaseMachineGroup', + version='2022-01-12', + protocol='HTTPS', + pathname=f'/api/v1/resources/{OpenApiUtilClient.get_encode_param(resource_group_id)}/machinegroups/{OpenApiUtilClient.get_encode_param(machine_group_id)}', + method='POST', + auth_type='AK', + style='ROA', + req_body_type='json', + body_type='json' + ) + return TeaCore.from_map( + pai_studio_20220112_models.ReleaseMachineGroupResponse(), + await self.call_api_async(params, req, runtime) + ) + + def release_machine_group( + self, + resource_group_id: str, + machine_group_id: str, + ) -> pai_studio_20220112_models.ReleaseMachineGroupResponse: + """ + @summary 释放到期的机器组 + + @return: ReleaseMachineGroupResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return self.release_machine_group_with_options(resource_group_id, machine_group_id, headers, runtime) + + async def 
release_machine_group_async( + self, + resource_group_id: str, + machine_group_id: str, + ) -> pai_studio_20220112_models.ReleaseMachineGroupResponse: + """ + @summary 释放到期的机器组 + + @return: ReleaseMachineGroupResponse + """ + runtime = util_models.RuntimeOptions() + headers = {} + return await self.release_machine_group_with_options_async(resource_group_id, machine_group_id, headers, runtime) + def scale_quota_with_options( self, quota_id: str, @@ -8937,6 +11377,14 @@ def scale_quota_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ScaleQuotaResponse: + """ + @summary 扩缩容Quota + + @param request: ScaleQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ScaleQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.min): @@ -8970,6 +11418,14 @@ async def scale_quota_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.ScaleQuotaResponse: + """ + @summary 扩缩容Quota + + @param request: ScaleQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: ScaleQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.min): @@ -9001,6 +11457,12 @@ def scale_quota( quota_id: str, request: pai_studio_20220112_models.ScaleQuotaRequest, ) -> pai_studio_20220112_models.ScaleQuotaResponse: + """ + @summary 扩缩容Quota + + @param request: ScaleQuotaRequest + @return: ScaleQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.scale_quota_with_options(quota_id, request, headers, runtime) @@ -9010,6 +11472,12 @@ async def scale_quota_async( quota_id: str, request: pai_studio_20220112_models.ScaleQuotaRequest, ) -> pai_studio_20220112_models.ScaleQuotaResponse: + """ + @summary 扩缩容Quota + + @param request: ScaleQuotaRequest + @return: 
ScaleQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.scale_quota_with_options_async(quota_id, request, headers, runtime) @@ -9020,6 +11488,13 @@ def stop_training_job_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.StopTrainingJobResponse: + """ + @summary 停止一个TrainingJob + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: StopTrainingJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -9045,6 +11520,13 @@ async def stop_training_job_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.StopTrainingJobResponse: + """ + @summary 停止一个TrainingJob + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: StopTrainingJobResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -9068,6 +11550,11 @@ def stop_training_job( self, training_job_id: str, ) -> pai_studio_20220112_models.StopTrainingJobResponse: + """ + @summary 停止一个TrainingJob + + @return: StopTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.stop_training_job_with_options(training_job_id, headers, runtime) @@ -9076,6 +11563,11 @@ async def stop_training_job_async( self, training_job_id: str, ) -> pai_studio_20220112_models.StopTrainingJobResponse: + """ + @summary 停止一个TrainingJob + + @return: StopTrainingJobResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.stop_training_job_with_options_async(training_job_id, headers, runtime) @@ -9086,6 +11578,14 @@ def tag_resources_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.TagResourcesResponse: + """ + @summary 打标签接口 + + @param request: TagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + 
@return: TagResourcesResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.region_id): @@ -9122,6 +11622,14 @@ async def tag_resources_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.TagResourcesResponse: + """ + @summary 打标签接口 + + @param request: TagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: TagResourcesResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.region_id): @@ -9156,6 +11664,12 @@ def tag_resources( self, request: pai_studio_20220112_models.TagResourcesRequest, ) -> pai_studio_20220112_models.TagResourcesResponse: + """ + @summary 打标签接口 + + @param request: TagResourcesRequest + @return: TagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.tag_resources_with_options(request, headers, runtime) @@ -9164,6 +11678,12 @@ async def tag_resources_async( self, request: pai_studio_20220112_models.TagResourcesRequest, ) -> pai_studio_20220112_models.TagResourcesResponse: + """ + @summary 打标签接口 + + @param request: TagResourcesRequest + @return: TagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.tag_resources_with_options_async(request, headers, runtime) @@ -9174,6 +11694,14 @@ def untag_resources_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UntagResourcesResponse: + """ + @summary 删标签接口 + + @param tmp_req: UntagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UntagResourcesResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.UntagResourcesShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -9218,6 +11746,14 @@ async def untag_resources_with_options_async( headers: Dict[str, str], runtime: 
util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UntagResourcesResponse: + """ + @summary 删标签接口 + + @param tmp_req: UntagResourcesRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UntagResourcesResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.UntagResourcesShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -9260,6 +11796,12 @@ def untag_resources( self, request: pai_studio_20220112_models.UntagResourcesRequest, ) -> pai_studio_20220112_models.UntagResourcesResponse: + """ + @summary 删标签接口 + + @param request: UntagResourcesRequest + @return: UntagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.untag_resources_with_options(request, headers, runtime) @@ -9268,6 +11810,12 @@ async def untag_resources_async( self, request: pai_studio_20220112_models.UntagResourcesRequest, ) -> pai_studio_20220112_models.UntagResourcesResponse: + """ + @summary 删标签接口 + + @param request: UntagResourcesRequest + @return: UntagResourcesResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.untag_resources_with_options_async(request, headers, runtime) @@ -9279,6 +11827,14 @@ def update_algorithm_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateAlgorithmResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateAlgorithmResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_description): @@ -9312,6 +11868,14 @@ async def update_algorithm_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateAlgorithmResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmRequest + @param headers: map + @param runtime: 
runtime options for this request RuntimeOptions + @return: UpdateAlgorithmResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.algorithm_description): @@ -9343,6 +11907,12 @@ def update_algorithm( algorithm_id: str, request: pai_studio_20220112_models.UpdateAlgorithmRequest, ) -> pai_studio_20220112_models.UpdateAlgorithmResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmRequest + @return: UpdateAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_algorithm_with_options(algorithm_id, request, headers, runtime) @@ -9352,6 +11922,12 @@ async def update_algorithm_async( algorithm_id: str, request: pai_studio_20220112_models.UpdateAlgorithmRequest, ) -> pai_studio_20220112_models.UpdateAlgorithmResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmRequest + @return: UpdateAlgorithmResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_algorithm_with_options_async(algorithm_id, request, headers, runtime) @@ -9364,6 +11940,14 @@ def update_algorithm_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateAlgorithmVersionResponse: + """ + @summary 更新算法 + + @param tmp_req: UpdateAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateAlgorithmVersionResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.UpdateAlgorithmVersionShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -9400,6 +11984,14 @@ async def update_algorithm_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateAlgorithmVersionResponse: + """ + @summary 更新算法 + + @param tmp_req: UpdateAlgorithmVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: 
UpdateAlgorithmVersionResponse + """ UtilClient.validate_model(tmp_req) request = pai_studio_20220112_models.UpdateAlgorithmVersionShrinkRequest() OpenApiUtilClient.convert(tmp_req, request) @@ -9434,6 +12026,12 @@ def update_algorithm_version( algorithm_version: str, request: pai_studio_20220112_models.UpdateAlgorithmVersionRequest, ) -> pai_studio_20220112_models.UpdateAlgorithmVersionResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmVersionRequest + @return: UpdateAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_algorithm_version_with_options(algorithm_id, algorithm_version, request, headers, runtime) @@ -9444,6 +12042,12 @@ async def update_algorithm_version_async( algorithm_version: str, request: pai_studio_20220112_models.UpdateAlgorithmVersionRequest, ) -> pai_studio_20220112_models.UpdateAlgorithmVersionResponse: + """ + @summary 更新算法 + + @param request: UpdateAlgorithmVersionRequest + @return: UpdateAlgorithmVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_algorithm_version_with_options_async(algorithm_id, algorithm_version, request, headers, runtime) @@ -9455,6 +12059,14 @@ def update_component_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentResponse: + """ + @summary 更新组件 + + @param request: UpdateComponentRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateComponentResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -9490,6 +12102,14 @@ async def update_component_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentResponse: + """ + @summary 更新组件 + + @param request: UpdateComponentRequest + @param headers: map + @param runtime: runtime options for this request 
RuntimeOptions + @return: UpdateComponentResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -9523,6 +12143,12 @@ def update_component( component_id: str, request: pai_studio_20220112_models.UpdateComponentRequest, ) -> pai_studio_20220112_models.UpdateComponentResponse: + """ + @summary 更新组件 + + @param request: UpdateComponentRequest + @return: UpdateComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_component_with_options(component_id, request, headers, runtime) @@ -9532,6 +12158,12 @@ async def update_component_async( component_id: str, request: pai_studio_20220112_models.UpdateComponentRequest, ) -> pai_studio_20220112_models.UpdateComponentResponse: + """ + @summary 更新组件 + + @param request: UpdateComponentRequest + @return: UpdateComponentResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_component_with_options_async(component_id, request, headers, runtime) @@ -9544,6 +12176,14 @@ def update_component_version_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentVersionResponse: + """ + @summary 更新组件版本 + + @param request: UpdateComponentVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateComponentVersionResponse + """ UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.labels): @@ -9578,6 +12218,14 @@ async def update_component_version_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentVersionResponse: + """ + @summary 更新组件版本 + + @param request: UpdateComponentVersionRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateComponentVersionResponse + """ UtilClient.validate_model(request) query = {} if not 
UtilClient.is_unset(request.labels): @@ -9610,6 +12258,12 @@ def update_component_version( version: str, request: pai_studio_20220112_models.UpdateComponentVersionRequest, ) -> pai_studio_20220112_models.UpdateComponentVersionResponse: + """ + @summary 更新组件版本 + + @param request: UpdateComponentVersionRequest + @return: UpdateComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_component_version_with_options(component_id, version, request, headers, runtime) @@ -9620,6 +12274,12 @@ async def update_component_version_async( version: str, request: pai_studio_20220112_models.UpdateComponentVersionRequest, ) -> pai_studio_20220112_models.UpdateComponentVersionResponse: + """ + @summary 更新组件版本 + + @param request: UpdateComponentVersionRequest + @return: UpdateComponentVersionResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_component_version_with_options_async(component_id, version, request, headers, runtime) @@ -9630,6 +12290,13 @@ def update_component_version_snapshot_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentVersionSnapshotResponse: + """ + @summary 更新组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -9655,6 +12322,13 @@ async def update_component_version_snapshot_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateComponentVersionSnapshotResponse: + """ + @summary 更新组件版本快照 + + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateComponentVersionSnapshotResponse + """ req = open_api_models.OpenApiRequest( headers=headers ) @@ -9678,6 +12352,11 @@ def update_component_version_snapshot( self, snapshot_id: str, ) -> 
pai_studio_20220112_models.UpdateComponentVersionSnapshotResponse: + """ + @summary 更新组件版本快照 + + @return: UpdateComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_component_version_snapshot_with_options(snapshot_id, headers, runtime) @@ -9686,106 +12365,15 @@ async def update_component_version_snapshot_async( self, snapshot_id: str, ) -> pai_studio_20220112_models.UpdateComponentVersionSnapshotResponse: + """ + @summary 更新组件版本快照 + + @return: UpdateComponentVersionSnapshotResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_component_version_snapshot_with_options_async(snapshot_id, headers, runtime) - def update_llmproject_with_options( - self, - project_id: str, - request: pai_studio_20220112_models.UpdateLLMProjectRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.UpdateLLMProjectResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.project_description): - body['ProjectDescription'] = request.project_description - if not UtilClient.is_unset(request.project_name): - body['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.root_path): - body['RootPath'] = request.root_path - if not UtilClient.is_unset(request.runtime): - body['Runtime'] = request.runtime - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='UpdateLLMProject', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.UpdateLLMProjectResponse(), - self.call_api(params, req, runtime) - 
) - - async def update_llmproject_with_options_async( - self, - project_id: str, - request: pai_studio_20220112_models.UpdateLLMProjectRequest, - headers: Dict[str, str], - runtime: util_models.RuntimeOptions, - ) -> pai_studio_20220112_models.UpdateLLMProjectResponse: - UtilClient.validate_model(request) - body = {} - if not UtilClient.is_unset(request.labels): - body['Labels'] = request.labels - if not UtilClient.is_unset(request.project_description): - body['ProjectDescription'] = request.project_description - if not UtilClient.is_unset(request.project_name): - body['ProjectName'] = request.project_name - if not UtilClient.is_unset(request.root_path): - body['RootPath'] = request.root_path - if not UtilClient.is_unset(request.runtime): - body['Runtime'] = request.runtime - req = open_api_models.OpenApiRequest( - headers=headers, - body=OpenApiUtilClient.parse_to_map(body) - ) - params = open_api_models.Params( - action='UpdateLLMProject', - version='2022-01-12', - protocol='HTTPS', - pathname=f'/api/v1/langstudio/projects/{OpenApiUtilClient.get_encode_param(project_id)}', - method='PUT', - auth_type='AK', - style='ROA', - req_body_type='json', - body_type='json' - ) - return TeaCore.from_map( - pai_studio_20220112_models.UpdateLLMProjectResponse(), - await self.call_api_async(params, req, runtime) - ) - - def update_llmproject( - self, - project_id: str, - request: pai_studio_20220112_models.UpdateLLMProjectRequest, - ) -> pai_studio_20220112_models.UpdateLLMProjectResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return self.update_llmproject_with_options(project_id, request, headers, runtime) - - async def update_llmproject_async( - self, - project_id: str, - request: pai_studio_20220112_models.UpdateLLMProjectRequest, - ) -> pai_studio_20220112_models.UpdateLLMProjectResponse: - runtime = util_models.RuntimeOptions() - headers = {} - return await self.update_llmproject_with_options_async(project_id, request, headers, runtime) - def 
update_quota_with_options( self, quota_id: str, @@ -9793,6 +12381,14 @@ def update_quota_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateQuotaResponse: + """ + @summary 更新Quota + + @param request: UpdateQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -9801,6 +12397,10 @@ def update_quota_with_options( body['Labels'] = request.labels if not UtilClient.is_unset(request.queue_strategy): body['QueueStrategy'] = request.queue_strategy + if not UtilClient.is_unset(request.quota_config): + body['QuotaConfig'] = request.quota_config + if not UtilClient.is_unset(request.quota_name): + body['QuotaName'] = request.quota_name req = open_api_models.OpenApiRequest( headers=headers, body=OpenApiUtilClient.parse_to_map(body) @@ -9828,6 +12428,14 @@ async def update_quota_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateQuotaResponse: + """ + @summary 更新Quota + + @param request: UpdateQuotaRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateQuotaResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -9836,6 +12444,10 @@ async def update_quota_with_options_async( body['Labels'] = request.labels if not UtilClient.is_unset(request.queue_strategy): body['QueueStrategy'] = request.queue_strategy + if not UtilClient.is_unset(request.quota_config): + body['QuotaConfig'] = request.quota_config + if not UtilClient.is_unset(request.quota_name): + body['QuotaName'] = request.quota_name req = open_api_models.OpenApiRequest( headers=headers, body=OpenApiUtilClient.parse_to_map(body) @@ -9861,6 +12473,12 @@ def update_quota( quota_id: str, request: 
pai_studio_20220112_models.UpdateQuotaRequest, ) -> pai_studio_20220112_models.UpdateQuotaResponse: + """ + @summary 更新Quota + + @param request: UpdateQuotaRequest + @return: UpdateQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_quota_with_options(quota_id, request, headers, runtime) @@ -9870,6 +12488,12 @@ async def update_quota_async( quota_id: str, request: pai_studio_20220112_models.UpdateQuotaRequest, ) -> pai_studio_20220112_models.UpdateQuotaResponse: + """ + @summary 更新Quota + + @param request: UpdateQuotaRequest + @return: UpdateQuotaResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_quota_with_options_async(quota_id, request, headers, runtime) @@ -9881,6 +12505,14 @@ def update_quota_labels_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateQuotaLabelsResponse: + """ + @summary 更新Quota标签 + + @param request: UpdateQuotaLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateQuotaLabelsResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.labels): @@ -9912,6 +12544,14 @@ async def update_quota_labels_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateQuotaLabelsResponse: + """ + @summary 更新Quota标签 + + @param request: UpdateQuotaLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateQuotaLabelsResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.labels): @@ -9941,6 +12581,12 @@ def update_quota_labels( quota_id: str, request: pai_studio_20220112_models.UpdateQuotaLabelsRequest, ) -> pai_studio_20220112_models.UpdateQuotaLabelsResponse: + """ + @summary 更新Quota标签 + + @param request: UpdateQuotaLabelsRequest + @return: 
UpdateQuotaLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_quota_labels_with_options(quota_id, request, headers, runtime) @@ -9950,6 +12596,12 @@ async def update_quota_labels_async( quota_id: str, request: pai_studio_20220112_models.UpdateQuotaLabelsRequest, ) -> pai_studio_20220112_models.UpdateQuotaLabelsResponse: + """ + @summary 更新Quota标签 + + @param request: UpdateQuotaLabelsRequest + @return: UpdateQuotaLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_quota_labels_with_options_async(quota_id, request, headers, runtime) @@ -9961,6 +12613,14 @@ def update_resource_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateResourceGroupResponse: + """ + @summary 更新Resource Group + + @param request: UpdateResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateResourceGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -9998,6 +12658,14 @@ async def update_resource_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateResourceGroupResponse: + """ + @summary 更新Resource Group + + @param request: UpdateResourceGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateResourceGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.description): @@ -10033,6 +12701,12 @@ def update_resource_group( resource_group_id: str, request: pai_studio_20220112_models.UpdateResourceGroupRequest, ) -> pai_studio_20220112_models.UpdateResourceGroupResponse: + """ + @summary 更新Resource Group + + @param request: UpdateResourceGroupRequest + @return: UpdateResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers 
= {} return self.update_resource_group_with_options(resource_group_id, request, headers, runtime) @@ -10042,6 +12716,12 @@ async def update_resource_group_async( resource_group_id: str, request: pai_studio_20220112_models.UpdateResourceGroupRequest, ) -> pai_studio_20220112_models.UpdateResourceGroupResponse: + """ + @summary 更新Resource Group + + @param request: UpdateResourceGroupRequest + @return: UpdateResourceGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_resource_group_with_options_async(resource_group_id, request, headers, runtime) @@ -10054,6 +12734,14 @@ def update_resource_group_machine_group_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateResourceGroupMachineGroupResponse: + """ + @summary 更新Machine Group + + @param request: UpdateResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateResourceGroupMachineGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.name): @@ -10086,6 +12774,14 @@ async def update_resource_group_machine_group_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateResourceGroupMachineGroupResponse: + """ + @summary 更新Machine Group + + @param request: UpdateResourceGroupMachineGroupRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateResourceGroupMachineGroupResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.name): @@ -10116,6 +12812,12 @@ def update_resource_group_machine_group( machine_group_id: str, request: pai_studio_20220112_models.UpdateResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.UpdateResourceGroupMachineGroupResponse: + """ + @summary 更新Machine Group + + @param request: 
UpdateResourceGroupMachineGroupRequest + @return: UpdateResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_resource_group_machine_group_with_options(resource_group_id, machine_group_id, request, headers, runtime) @@ -10126,6 +12828,12 @@ async def update_resource_group_machine_group_async( machine_group_id: str, request: pai_studio_20220112_models.UpdateResourceGroupMachineGroupRequest, ) -> pai_studio_20220112_models.UpdateResourceGroupMachineGroupResponse: + """ + @summary 更新Machine Group + + @param request: UpdateResourceGroupMachineGroupRequest + @return: UpdateResourceGroupMachineGroupResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_resource_group_machine_group_with_options_async(resource_group_id, machine_group_id, request, headers, runtime) @@ -10137,6 +12845,14 @@ def update_training_job_labels_with_options( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateTrainingJobLabelsResponse: + """ + @summary 更新一个TrainingJob的Labels + + @param request: UpdateTrainingJobLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateTrainingJobLabelsResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.labels): @@ -10168,6 +12884,14 @@ async def update_training_job_labels_with_options_async( headers: Dict[str, str], runtime: util_models.RuntimeOptions, ) -> pai_studio_20220112_models.UpdateTrainingJobLabelsResponse: + """ + @summary 更新一个TrainingJob的Labels + + @param request: UpdateTrainingJobLabelsRequest + @param headers: map + @param runtime: runtime options for this request RuntimeOptions + @return: UpdateTrainingJobLabelsResponse + """ UtilClient.validate_model(request) body = {} if not UtilClient.is_unset(request.labels): @@ -10197,6 +12921,12 @@ def update_training_job_labels( training_job_id: str, request: 
pai_studio_20220112_models.UpdateTrainingJobLabelsRequest, ) -> pai_studio_20220112_models.UpdateTrainingJobLabelsResponse: + """ + @summary 更新一个TrainingJob的Labels + + @param request: UpdateTrainingJobLabelsRequest + @return: UpdateTrainingJobLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return self.update_training_job_labels_with_options(training_job_id, request, headers, runtime) @@ -10206,6 +12936,12 @@ async def update_training_job_labels_async( training_job_id: str, request: pai_studio_20220112_models.UpdateTrainingJobLabelsRequest, ) -> pai_studio_20220112_models.UpdateTrainingJobLabelsResponse: + """ + @summary 更新一个TrainingJob的Labels + + @param request: UpdateTrainingJobLabelsRequest + @return: UpdateTrainingJobLabelsResponse + """ runtime = util_models.RuntimeOptions() headers = {} return await self.update_training_job_labels_with_options_async(training_job_id, request, headers, runtime) diff --git a/pai/libs/alibabacloud_paistudio20220112/models.py b/pai/libs/alibabacloud_paistudio20220112/models.py index e5f6ecf..81eda37 100644 --- a/pai/libs/alibabacloud_paistudio20220112/models.py +++ b/pai/libs/alibabacloud_paistudio20220112/models.py @@ -43,7 +43,9 @@ def __init__( value: str = None, version: str = None, ): + # This parameter is required. self.value = value + # This parameter is required. self.version = version def validate(self): @@ -75,6 +77,7 @@ def __init__( self, policy: AlgorithmSpecComputeResourcePolicy = None, ): + # This parameter is required. self.policy = policy def validate(self): @@ -346,9 +349,11 @@ def __init__( self.default_value = default_value self.description = description self.display_name = display_name + # This parameter is required. self.name = name self.range = range self.required = required + # This parameter is required. self.type = type def validate(self): @@ -407,6 +412,7 @@ def __init__( supported_channel_types: List[str] = None, ): self.description = description + # This parameter is required. 
self.name = name self.properties = properties self.required = required @@ -456,7 +462,9 @@ def __init__( regex: str = None, ): self.description = description + # This parameter is required. self.name = name + # This parameter is required. self.regex = regex def validate(self): @@ -494,8 +502,11 @@ def __init__( operator: str = None, values: List[str] = None, ): + # This parameter is required. self.key = key + # This parameter is required. self.operator = operator + # This parameter is required. self.values = values def validate(self): @@ -545,12 +556,15 @@ def __init__( supports_distributed_training: bool = None, ): self.code_dir = code_dir + # This parameter is required. self.command = command self.compute_resource = compute_resource self.customization = customization self.hyper_parameters = hyper_parameters + # This parameter is required. self.image = image self.input_channels = input_channels + # This parameter is required. self.job_type = job_type self.metric_definitions = metric_definitions self.output_channels = output_channels @@ -761,7 +775,9 @@ def __init__( name: str = None, value: str = None, ): + # This parameter is required. self.name = name + # This parameter is required. self.value = value def validate(self): @@ -802,10 +818,13 @@ def __init__( resource_requirements: List[ConditionExpression] = None, ): self.code_dir = code_dir + # This parameter is required. self.command = command self.hyper_parameters = hyper_parameters + # This parameter is required. self.image = image self.input_channels = input_channels + # This parameter is required. 
self.job_type = job_type self.metric_definitions = metric_definitions self.output_channels = output_channels @@ -999,6 +1018,51 @@ def from_map(self, m: dict = None): return self +class GPUMetric(TeaModel): + def __init__( + self, + index: int = None, + model: str = None, + status: int = None, + usage_rate: float = None, + ): + self.index = index + self.model = model + self.status = status + self.usage_rate = usage_rate + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.index is not None: + result['Index'] = self.index + if self.model is not None: + result['Model'] = self.model + if self.status is not None: + result['Status'] = self.status + if self.usage_rate is not None: + result['UsageRate'] = self.usage_rate + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Index') is not None: + self.index = m.get('Index') + if m.get('Model') is not None: + self.model = m.get('Model') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('UsageRate') is not None: + self.usage_rate = m.get('UsageRate') + return self + + class JobViewMetric(TeaModel): def __init__( self, @@ -1173,6 +1237,7 @@ def __init__( gmt_modified_time: str = None, gmt_started_time: str = None, machine_group_id: str = None, + order_instance_id: str = None, payment_duration: str = None, payment_duration_unit: str = None, payment_type: str = None, @@ -1191,6 +1256,7 @@ def __init__( self.gmt_modified_time = gmt_modified_time self.gmt_started_time = gmt_started_time self.machine_group_id = machine_group_id + self.order_instance_id = order_instance_id self.payment_duration = payment_duration self.payment_duration_unit = payment_duration_unit self.payment_type = payment_type @@ -1227,6 +1293,8 @@ def to_map(self): result['GmtStartedTime'] = self.gmt_started_time if self.machine_group_id is not None: result['MachineGroupID'] = self.machine_group_id + if 
self.order_instance_id is not None: + result['OrderInstanceId'] = self.order_instance_id if self.payment_duration is not None: result['PaymentDuration'] = self.payment_duration if self.payment_duration_unit is not None: @@ -1265,6 +1333,8 @@ def from_map(self, m: dict = None): self.gmt_started_time = m.get('GmtStartedTime') if m.get('MachineGroupID') is not None: self.machine_group_id = m.get('MachineGroupID') + if m.get('OrderInstanceId') is not None: + self.order_instance_id = m.get('OrderInstanceId') if m.get('PaymentDuration') is not None: self.payment_duration = m.get('PaymentDuration') if m.get('PaymentDurationUnit') is not None: @@ -1350,6 +1420,39 @@ def from_map(self, m: dict = None): return self +class UserInfo(TeaModel): + def __init__( + self, + user_id: str = None, + user_name: str = None, + ): + self.user_id = user_id + self.user_name = user_name + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.user_id is not None: + result['UserId'] = self.user_id + if self.user_name is not None: + result['UserName'] = self.user_name + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('UserName') is not None: + self.user_name = m.get('UserName') + return self + + class Node(TeaModel): def __init__( self, @@ -1372,6 +1475,7 @@ def __init__( node_status: str = None, node_type: str = None, order_status: str = None, + pod_num: int = None, reason_code: str = None, reason_message: str = None, request_cpu: str = None, @@ -1379,6 +1483,8 @@ def __init__( request_memory: str = None, resource_group_id: str = None, resource_group_name: str = None, + users: List[UserInfo] = None, + workload_num: int = None, ): self.accelerator_type = accelerator_type self.bound_quotas = bound_quotas @@ -1399,6 +1505,7 @@ def __init__( self.node_status = node_status self.node_type = node_type 
self.order_status = order_status + self.pod_num = pod_num self.reason_code = reason_code self.reason_message = reason_message self.request_cpu = request_cpu @@ -1406,12 +1513,18 @@ def __init__( self.request_memory = request_memory self.resource_group_id = resource_group_id self.resource_group_name = resource_group_name + self.users = users + self.workload_num = workload_num def validate(self): if self.bound_quotas: for k in self.bound_quotas: if k: k.validate() + if self.users: + for k in self.users: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -1459,6 +1572,8 @@ def to_map(self): result['NodeType'] = self.node_type if self.order_status is not None: result['OrderStatus'] = self.order_status + if self.pod_num is not None: + result['PodNum'] = self.pod_num if self.reason_code is not None: result['ReasonCode'] = self.reason_code if self.reason_message is not None: @@ -1473,6 +1588,12 @@ def to_map(self): result['ResourceGroupId'] = self.resource_group_id if self.resource_group_name is not None: result['ResourceGroupName'] = self.resource_group_name + result['Users'] = [] + if self.users is not None: + for k in self.users: + result['Users'].append(k.to_map() if k else None) + if self.workload_num is not None: + result['WorkloadNum'] = self.workload_num return result def from_map(self, m: dict = None): @@ -1518,6 +1639,8 @@ def from_map(self, m: dict = None): self.node_type = m.get('NodeType') if m.get('OrderStatus') is not None: self.order_status = m.get('OrderStatus') + if m.get('PodNum') is not None: + self.pod_num = m.get('PodNum') if m.get('ReasonCode') is not None: self.reason_code = m.get('ReasonCode') if m.get('ReasonMessage') is not None: @@ -1532,6 +1655,96 @@ def from_map(self, m: dict = None): self.resource_group_id = m.get('ResourceGroupId') if m.get('ResourceGroupName') is not None: self.resource_group_name = m.get('ResourceGroupName') + self.users = [] + if m.get('Users') is not None: + for k in m.get('Users'): + temp_model = 
UserInfo() + self.users.append(temp_model.from_map(k)) + if m.get('WorkloadNum') is not None: + self.workload_num = m.get('WorkloadNum') + return self + + +class NodeGPUMetric(TeaModel): + def __init__( + self, + accelerator_type: str = None, + gpucount: int = None, + gpumetrics: List[GPUMetric] = None, + gputype: str = None, + memory_util: float = None, + node_id: str = None, + node_type: str = None, + total_memory: float = None, + used_memory: float = None, + ): + self.accelerator_type = accelerator_type + self.gpucount = gpucount + self.gpumetrics = gpumetrics + self.gputype = gputype + self.memory_util = memory_util + self.node_id = node_id + self.node_type = node_type + self.total_memory = total_memory + self.used_memory = used_memory + + def validate(self): + if self.gpumetrics: + for k in self.gpumetrics: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.accelerator_type is not None: + result['AcceleratorType'] = self.accelerator_type + if self.gpucount is not None: + result['GPUCount'] = self.gpucount + result['GPUMetrics'] = [] + if self.gpumetrics is not None: + for k in self.gpumetrics: + result['GPUMetrics'].append(k.to_map() if k else None) + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.memory_util is not None: + result['MemoryUtil'] = self.memory_util + if self.node_id is not None: + result['NodeId'] = self.node_id + if self.node_type is not None: + result['NodeType'] = self.node_type + if self.total_memory is not None: + result['TotalMemory'] = self.total_memory + if self.used_memory is not None: + result['UsedMemory'] = self.used_memory + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('AcceleratorType') is not None: + self.accelerator_type = m.get('AcceleratorType') + if m.get('GPUCount') is not None: + self.gpucount = m.get('GPUCount') + self.gpumetrics = [] + if m.get('GPUMetrics') is not None: + 
for k in m.get('GPUMetrics'): + temp_model = GPUMetric() + self.gpumetrics.append(temp_model.from_map(k)) + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('MemoryUtil') is not None: + self.memory_util = m.get('MemoryUtil') + if m.get('NodeId') is not None: + self.node_id = m.get('NodeId') + if m.get('NodeType') is not None: + self.node_type = m.get('NodeType') + if m.get('TotalMemory') is not None: + self.total_memory = m.get('TotalMemory') + if m.get('UsedMemory') is not None: + self.used_memory = m.get('UsedMemory') return self @@ -1979,6 +2192,7 @@ def __init__( self, code: str = None, code_type: str = None, + gmt_created_time: str = None, gmt_dequeued_time: str = None, gmt_enqueued_time: str = None, gmt_position_modified_time: str = None, @@ -1992,12 +2206,15 @@ def __init__( status: str = None, sub_status: str = None, user_id: str = None, + user_name: str = None, workload_id: str = None, + workload_name: str = None, workload_type: str = None, workspace_id: str = None, ): self.code = code self.code_type = code_type + self.gmt_created_time = gmt_created_time self.gmt_dequeued_time = gmt_dequeued_time self.gmt_enqueued_time = gmt_enqueued_time self.gmt_position_modified_time = gmt_position_modified_time @@ -2011,7 +2228,9 @@ def __init__( self.status = status self.sub_status = sub_status self.user_id = user_id + self.user_name = user_name self.workload_id = workload_id + self.workload_name = workload_name self.workload_type = workload_type self.workspace_id = workspace_id @@ -2029,6 +2248,8 @@ def to_map(self): result['Code'] = self.code if self.code_type is not None: result['CodeType'] = self.code_type + if self.gmt_created_time is not None: + result['GmtCreatedTime'] = self.gmt_created_time if self.gmt_dequeued_time is not None: result['GmtDequeuedTime'] = self.gmt_dequeued_time if self.gmt_enqueued_time is not None: @@ -2055,8 +2276,12 @@ def to_map(self): result['SubStatus'] = self.sub_status if self.user_id is not None: 
result['UserId'] = self.user_id + if self.user_name is not None: + result['UserName'] = self.user_name if self.workload_id is not None: result['WorkloadId'] = self.workload_id + if self.workload_name is not None: + result['WorkloadName'] = self.workload_name if self.workload_type is not None: result['WorkloadType'] = self.workload_type if self.workspace_id is not None: @@ -2069,6 +2294,8 @@ def from_map(self, m: dict = None): self.code = m.get('Code') if m.get('CodeType') is not None: self.code_type = m.get('CodeType') + if m.get('GmtCreatedTime') is not None: + self.gmt_created_time = m.get('GmtCreatedTime') if m.get('GmtDequeuedTime') is not None: self.gmt_dequeued_time = m.get('GmtDequeuedTime') if m.get('GmtEnqueuedTime') is not None: @@ -2096,8 +2323,12 @@ def from_map(self, m: dict = None): self.sub_status = m.get('SubStatus') if m.get('UserId') is not None: self.user_id = m.get('UserId') + if m.get('UserName') is not None: + self.user_name = m.get('UserName') if m.get('WorkloadId') is not None: self.workload_id = m.get('WorkloadId') + if m.get('WorkloadName') is not None: + self.workload_name = m.get('WorkloadName') if m.get('WorkloadType') is not None: self.workload_type = m.get('WorkloadType') if m.get('WorkspaceId') is not None: @@ -2315,6 +2546,7 @@ def __init__( acs: ACS = None, cluster_id: str = None, default_gpudriver: str = None, + enable_preempt_subquota_workloads: bool = None, resource_specs: List[WorkspaceSpecs] = None, support_gpudrivers: List[str] = None, support_rdma: bool = None, @@ -2323,6 +2555,7 @@ def __init__( self.acs = acs self.cluster_id = cluster_id self.default_gpudriver = default_gpudriver + self.enable_preempt_subquota_workloads = enable_preempt_subquota_workloads self.resource_specs = resource_specs self.support_gpudrivers = support_gpudrivers self.support_rdma = support_rdma @@ -2350,6 +2583,8 @@ def to_map(self): result['ClusterId'] = self.cluster_id if self.default_gpudriver is not None: result['DefaultGPUDriver'] = 
self.default_gpudriver + if self.enable_preempt_subquota_workloads is not None: + result['EnablePreemptSubquotaWorkloads'] = self.enable_preempt_subquota_workloads result['ResourceSpecs'] = [] if self.resource_specs is not None: for k in self.resource_specs: @@ -2371,6 +2606,8 @@ def from_map(self, m: dict = None): self.cluster_id = m.get('ClusterId') if m.get('DefaultGPUDriver') is not None: self.default_gpudriver = m.get('DefaultGPUDriver') + if m.get('EnablePreemptSubquotaWorkloads') is not None: + self.enable_preempt_subquota_workloads = m.get('EnablePreemptSubquotaWorkloads') self.resource_specs = [] if m.get('ResourceSpecs') is not None: for k in m.get('ResourceSpecs'): @@ -2651,6 +2888,45 @@ def from_map(self, m: dict = None): return self +class QuotaJob(TeaModel): + def __init__( + self, + queuing: int = None, + running: int = None, + total: int = None, + ): + self.queuing = queuing + self.running = running + self.total = total + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.queuing is not None: + result['Queuing'] = self.queuing + if self.running is not None: + result['Running'] = self.running + if self.total is not None: + result['Total'] = self.total + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Queuing') is not None: + self.queuing = m.get('Queuing') + if m.get('Running') is not None: + self.running = m.get('Running') + if m.get('Total') is not None: + self.total = m.get('Total') + return self + + class QuotaJobViewMetric(TeaModel): def __init__( self, @@ -2968,57 +3244,28 @@ def from_map(self, m: dict = None): return self -class QuotaUserViewMetric(TeaModel): +class WorkloadDetails(TeaModel): def __init__( self, - cpunode_number: int = None, - cpuusage_rate: str = None, - cpu_job_names: List[str] = None, - cpu_node_names: List[str] = None, - disk_read_rate: str = None, - disk_write_rate: str = None, - gpunode_number: 
int = None, - gpuusage_rate: str = None, - gpu_job_names: List[str] = None, - gpu_node_names: List[str] = None, - job_type: str = None, - memory_usage_rate: str = None, - network_input_rate: str = None, - network_output_rate: str = None, - node_names: List[str] = None, - request_cpu: int = None, - request_gpu: int = None, - request_memory: int = None, - total_cpu: int = None, - total_gpu: int = None, - total_memory: int = None, - user_id: str = None, + dlc: QuotaJob = None, + dsw: QuotaJob = None, + eas: QuotaJob = None, + summary: QuotaJob = None, ): - self.cpunode_number = cpunode_number - self.cpuusage_rate = cpuusage_rate - self.cpu_job_names = cpu_job_names - self.cpu_node_names = cpu_node_names - self.disk_read_rate = disk_read_rate - self.disk_write_rate = disk_write_rate - self.gpunode_number = gpunode_number - self.gpuusage_rate = gpuusage_rate - self.gpu_job_names = gpu_job_names - self.gpu_node_names = gpu_node_names - self.job_type = job_type - self.memory_usage_rate = memory_usage_rate - self.network_input_rate = network_input_rate - self.network_output_rate = network_output_rate - self.node_names = node_names - self.request_cpu = request_cpu - self.request_gpu = request_gpu - self.request_memory = request_memory - self.total_cpu = total_cpu - self.total_gpu = total_gpu - self.total_memory = total_memory - self.user_id = user_id + self.dlc = dlc + self.dsw = dsw + self.eas = eas + self.summary = summary def validate(self): - pass + if self.dlc: + self.dlc.validate() + if self.dsw: + self.dsw.validate() + if self.eas: + self.eas.validate() + if self.summary: + self.summary.validate() def to_map(self): _map = super().to_map() @@ -3026,80 +3273,318 @@ def to_map(self): return _map result = dict() - if self.cpunode_number is not None: - result['CPUNodeNumber'] = self.cpunode_number - if self.cpuusage_rate is not None: - result['CPUUsageRate'] = self.cpuusage_rate - if self.cpu_job_names is not None: - result['CpuJobNames'] = self.cpu_job_names - if 
self.cpu_node_names is not None: - result['CpuNodeNames'] = self.cpu_node_names - if self.disk_read_rate is not None: - result['DiskReadRate'] = self.disk_read_rate - if self.disk_write_rate is not None: - result['DiskWriteRate'] = self.disk_write_rate - if self.gpunode_number is not None: - result['GPUNodeNumber'] = self.gpunode_number - if self.gpuusage_rate is not None: - result['GPUUsageRate'] = self.gpuusage_rate - if self.gpu_job_names is not None: - result['GpuJobNames'] = self.gpu_job_names - if self.gpu_node_names is not None: - result['GpuNodeNames'] = self.gpu_node_names - if self.job_type is not None: - result['JobType'] = self.job_type - if self.memory_usage_rate is not None: - result['MemoryUsageRate'] = self.memory_usage_rate - if self.network_input_rate is not None: - result['NetworkInputRate'] = self.network_input_rate - if self.network_output_rate is not None: - result['NetworkOutputRate'] = self.network_output_rate - if self.node_names is not None: - result['NodeNames'] = self.node_names - if self.request_cpu is not None: - result['RequestCPU'] = self.request_cpu - if self.request_gpu is not None: - result['RequestGPU'] = self.request_gpu - if self.request_memory is not None: - result['RequestMemory'] = self.request_memory - if self.total_cpu is not None: - result['TotalCPU'] = self.total_cpu - if self.total_gpu is not None: - result['TotalGPU'] = self.total_gpu - if self.total_memory is not None: - result['TotalMemory'] = self.total_memory - if self.user_id is not None: - result['UserId'] = self.user_id + if self.dlc is not None: + result['DLC'] = self.dlc.to_map() + if self.dsw is not None: + result['DSW'] = self.dsw.to_map() + if self.eas is not None: + result['EAS'] = self.eas.to_map() + if self.summary is not None: + result['Summary'] = self.summary.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('CPUNodeNumber') is not None: - self.cpunode_number = m.get('CPUNodeNumber') - if m.get('CPUUsageRate') is 
not None: - self.cpuusage_rate = m.get('CPUUsageRate') - if m.get('CpuJobNames') is not None: - self.cpu_job_names = m.get('CpuJobNames') - if m.get('CpuNodeNames') is not None: - self.cpu_node_names = m.get('CpuNodeNames') - if m.get('DiskReadRate') is not None: - self.disk_read_rate = m.get('DiskReadRate') - if m.get('DiskWriteRate') is not None: - self.disk_write_rate = m.get('DiskWriteRate') - if m.get('GPUNodeNumber') is not None: - self.gpunode_number = m.get('GPUNodeNumber') - if m.get('GPUUsageRate') is not None: - self.gpuusage_rate = m.get('GPUUsageRate') - if m.get('GpuJobNames') is not None: - self.gpu_job_names = m.get('GpuJobNames') - if m.get('GpuNodeNames') is not None: - self.gpu_node_names = m.get('GpuNodeNames') - if m.get('JobType') is not None: - self.job_type = m.get('JobType') - if m.get('MemoryUsageRate') is not None: - self.memory_usage_rate = m.get('MemoryUsageRate') - if m.get('NetworkInputRate') is not None: - self.network_input_rate = m.get('NetworkInputRate') + if m.get('DLC') is not None: + temp_model = QuotaJob() + self.dlc = temp_model.from_map(m['DLC']) + if m.get('DSW') is not None: + temp_model = QuotaJob() + self.dsw = temp_model.from_map(m['DSW']) + if m.get('EAS') is not None: + temp_model = QuotaJob() + self.eas = temp_model.from_map(m['EAS']) + if m.get('Summary') is not None: + temp_model = QuotaJob() + self.summary = temp_model.from_map(m['Summary']) + return self + + +class QuotaTopo(TeaModel): + def __init__( + self, + depth: str = None, + parent_quota_id: str = None, + quota_details: QuotaDetails = None, + quota_id: str = None, + quota_name: str = None, + resource_type: str = None, + workload_details: WorkloadDetails = None, + ): + self.depth = depth + self.parent_quota_id = parent_quota_id + self.quota_details = quota_details + self.quota_id = quota_id + self.quota_name = quota_name + self.resource_type = resource_type + self.workload_details = workload_details + + def validate(self): + if self.quota_details: + 
self.quota_details.validate() + if self.workload_details: + self.workload_details.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.depth is not None: + result['Depth'] = self.depth + if self.parent_quota_id is not None: + result['ParentQuotaId'] = self.parent_quota_id + if self.quota_details is not None: + result['QuotaDetails'] = self.quota_details.to_map() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.quota_name is not None: + result['QuotaName'] = self.quota_name + if self.resource_type is not None: + result['ResourceType'] = self.resource_type + if self.workload_details is not None: + result['WorkloadDetails'] = self.workload_details.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Depth') is not None: + self.depth = m.get('Depth') + if m.get('ParentQuotaId') is not None: + self.parent_quota_id = m.get('ParentQuotaId') + if m.get('QuotaDetails') is not None: + temp_model = QuotaDetails() + self.quota_details = temp_model.from_map(m['QuotaDetails']) + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('QuotaName') is not None: + self.quota_name = m.get('QuotaName') + if m.get('ResourceType') is not None: + self.resource_type = m.get('ResourceType') + if m.get('WorkloadDetails') is not None: + temp_model = WorkloadDetails() + self.workload_details = temp_model.from_map(m['WorkloadDetails']) + return self + + +class QuotaUserResources(TeaModel): + def __init__( + self, + submitted: ResourceAmount = None, + used: ResourceAmount = None, + ): + self.submitted = submitted + self.used = used + + def validate(self): + if self.submitted: + self.submitted.validate() + if self.used: + self.used.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.submitted is not None: + result['Submitted'] = self.submitted.to_map() + if 
self.used is not None: + result['Used'] = self.used.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Submitted') is not None: + temp_model = ResourceAmount() + self.submitted = temp_model.from_map(m['Submitted']) + if m.get('Used') is not None: + temp_model = ResourceAmount() + self.used = temp_model.from_map(m['Used']) + return self + + +class QuotaUser(TeaModel): + def __init__( + self, + resources: QuotaUserResources = None, + user_id: str = None, + username: str = None, + workload_count: int = None, + ): + self.resources = resources + self.user_id = user_id + self.username = username + self.workload_count = workload_count + + def validate(self): + if self.resources: + self.resources.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.resources is not None: + result['Resources'] = self.resources.to_map() + if self.user_id is not None: + result['UserId'] = self.user_id + if self.username is not None: + result['Username'] = self.username + if self.workload_count is not None: + result['WorkloadCount'] = self.workload_count + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Resources') is not None: + temp_model = QuotaUserResources() + self.resources = temp_model.from_map(m['Resources']) + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('Username') is not None: + self.username = m.get('Username') + if m.get('WorkloadCount') is not None: + self.workload_count = m.get('WorkloadCount') + return self + + +class QuotaUserViewMetric(TeaModel): + def __init__( + self, + cpunode_number: int = None, + cpuusage_rate: str = None, + cpu_job_names: List[str] = None, + cpu_node_names: List[str] = None, + disk_read_rate: str = None, + disk_write_rate: str = None, + gpunode_number: int = None, + gpuusage_rate: str = None, + gpu_job_names: List[str] = None, + gpu_node_names: List[str] = None, + 
job_type: str = None, + memory_usage_rate: str = None, + network_input_rate: str = None, + network_output_rate: str = None, + node_names: List[str] = None, + request_cpu: int = None, + request_gpu: int = None, + request_memory: int = None, + total_cpu: int = None, + total_gpu: int = None, + total_memory: int = None, + user_id: str = None, + ): + self.cpunode_number = cpunode_number + self.cpuusage_rate = cpuusage_rate + self.cpu_job_names = cpu_job_names + self.cpu_node_names = cpu_node_names + self.disk_read_rate = disk_read_rate + self.disk_write_rate = disk_write_rate + self.gpunode_number = gpunode_number + self.gpuusage_rate = gpuusage_rate + self.gpu_job_names = gpu_job_names + self.gpu_node_names = gpu_node_names + self.job_type = job_type + self.memory_usage_rate = memory_usage_rate + self.network_input_rate = network_input_rate + self.network_output_rate = network_output_rate + self.node_names = node_names + self.request_cpu = request_cpu + self.request_gpu = request_gpu + self.request_memory = request_memory + self.total_cpu = total_cpu + self.total_gpu = total_gpu + self.total_memory = total_memory + self.user_id = user_id + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.cpunode_number is not None: + result['CPUNodeNumber'] = self.cpunode_number + if self.cpuusage_rate is not None: + result['CPUUsageRate'] = self.cpuusage_rate + if self.cpu_job_names is not None: + result['CpuJobNames'] = self.cpu_job_names + if self.cpu_node_names is not None: + result['CpuNodeNames'] = self.cpu_node_names + if self.disk_read_rate is not None: + result['DiskReadRate'] = self.disk_read_rate + if self.disk_write_rate is not None: + result['DiskWriteRate'] = self.disk_write_rate + if self.gpunode_number is not None: + result['GPUNodeNumber'] = self.gpunode_number + if self.gpuusage_rate is not None: + result['GPUUsageRate'] = self.gpuusage_rate + if self.gpu_job_names is not 
None: + result['GpuJobNames'] = self.gpu_job_names + if self.gpu_node_names is not None: + result['GpuNodeNames'] = self.gpu_node_names + if self.job_type is not None: + result['JobType'] = self.job_type + if self.memory_usage_rate is not None: + result['MemoryUsageRate'] = self.memory_usage_rate + if self.network_input_rate is not None: + result['NetworkInputRate'] = self.network_input_rate + if self.network_output_rate is not None: + result['NetworkOutputRate'] = self.network_output_rate + if self.node_names is not None: + result['NodeNames'] = self.node_names + if self.request_cpu is not None: + result['RequestCPU'] = self.request_cpu + if self.request_gpu is not None: + result['RequestGPU'] = self.request_gpu + if self.request_memory is not None: + result['RequestMemory'] = self.request_memory + if self.total_cpu is not None: + result['TotalCPU'] = self.total_cpu + if self.total_gpu is not None: + result['TotalGPU'] = self.total_gpu + if self.total_memory is not None: + result['TotalMemory'] = self.total_memory + if self.user_id is not None: + result['UserId'] = self.user_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('CPUNodeNumber') is not None: + self.cpunode_number = m.get('CPUNodeNumber') + if m.get('CPUUsageRate') is not None: + self.cpuusage_rate = m.get('CPUUsageRate') + if m.get('CpuJobNames') is not None: + self.cpu_job_names = m.get('CpuJobNames') + if m.get('CpuNodeNames') is not None: + self.cpu_node_names = m.get('CpuNodeNames') + if m.get('DiskReadRate') is not None: + self.disk_read_rate = m.get('DiskReadRate') + if m.get('DiskWriteRate') is not None: + self.disk_write_rate = m.get('DiskWriteRate') + if m.get('GPUNodeNumber') is not None: + self.gpunode_number = m.get('GPUNodeNumber') + if m.get('GPUUsageRate') is not None: + self.gpuusage_rate = m.get('GPUUsageRate') + if m.get('GpuJobNames') is not None: + self.gpu_job_names = m.get('GpuJobNames') + if m.get('GpuNodeNames') is not None: + 
self.gpu_node_names = m.get('GpuNodeNames') + if m.get('JobType') is not None: + self.job_type = m.get('JobType') + if m.get('MemoryUsageRate') is not None: + self.memory_usage_rate = m.get('MemoryUsageRate') + if m.get('NetworkInputRate') is not None: + self.network_input_rate = m.get('NetworkInputRate') if m.get('NetworkOutputRate') is not None: self.network_output_rate = m.get('NetworkOutputRate') if m.get('NodeNames') is not None: @@ -3428,29 +3913,62 @@ def from_map(self, m: dict = None): return self -class UserViewMetric(TeaModel): +class TimeRangeFilter(TeaModel): def __init__( self, - cpunode_number: int = None, - cpuusage_rate: str = None, - cpu_job_names: List[str] = None, - cpu_node_names: List[str] = None, - disk_read_rate: str = None, - disk_write_rate: str = None, - gpunode_number: int = None, - gpuusage_rate: str = None, - gpu_job_names: List[str] = None, - gpu_node_names: List[str] = None, - job_type: str = None, - memory_usage_rate: str = None, - network_input_rate: str = None, - network_output_rate: str = None, - node_names: List[str] = None, - request_cpu: int = None, - request_gpu: int = None, - request_memory: int = None, - resource_group_id: str = None, - total_cpu: int = None, + end_time: str = None, + start_time: str = None, + ): + self.end_time = end_time + self.start_time = start_time + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.start_time is not None: + result['StartTime'] = self.start_time + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + return self + + +class UserViewMetric(TeaModel): + def __init__( + self, + cpunode_number: int = None, + cpuusage_rate: str = None, + cpu_job_names: List[str] = None, 
+ cpu_node_names: List[str] = None, + disk_read_rate: str = None, + disk_write_rate: str = None, + gpunode_number: int = None, + gpuusage_rate: str = None, + gpu_job_names: List[str] = None, + gpu_node_names: List[str] = None, + job_type: str = None, + memory_usage_rate: str = None, + network_input_rate: str = None, + network_output_rate: str = None, + node_names: List[str] = None, + request_cpu: int = None, + request_gpu: int = None, + request_memory: int = None, + resource_group_id: str = None, + total_cpu: int = None, total_gpu: int = None, total_memory: int = None, user_id: str = None, @@ -3587,16 +4105,12 @@ def from_map(self, m: dict = None): return self -class BuildLLMSnapshotRequestWorkloadContainer(TeaModel): +class CheckInstanceWebTerminalRequest(TeaModel): def __init__( self, - image: str = None, - port: int = None, - user_command: str = None, + check_info: str = None, ): - self.image = image - self.port = port - self.user_command = user_command + self.check_info = check_info def validate(self): pass @@ -3607,35 +4121,23 @@ def to_map(self): return _map result = dict() - if self.image is not None: - result['Image'] = self.image - if self.port is not None: - result['Port'] = self.port - if self.user_command is not None: - result['UserCommand'] = self.user_command + if self.check_info is not None: + result['CheckInfo'] = self.check_info return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Image') is not None: - self.image = m.get('Image') - if m.get('Port') is not None: - self.port = m.get('Port') - if m.get('UserCommand') is not None: - self.user_command = m.get('UserCommand') + if m.get('CheckInfo') is not None: + self.check_info = m.get('CheckInfo') return self -class BuildLLMSnapshotRequestWorkloadExtraConfig(TeaModel): +class CheckInstanceWebTerminalResponseBody(TeaModel): def __init__( self, - enable_webservice: bool = None, - job_max_running_time_minutes: int = None, - third_party_lib_dir: str = None, + request_id: str = 
None, ): - self.enable_webservice = enable_webservice - self.job_max_running_time_minutes = job_max_running_time_minutes - self.third_party_lib_dir = third_party_lib_dir + self.request_id = request_id def validate(self): pass @@ -3646,40 +4148,31 @@ def to_map(self): return _map result = dict() - if self.enable_webservice is not None: - result['EnableWebservice'] = self.enable_webservice - if self.job_max_running_time_minutes is not None: - result['JobMaxRunningTimeMinutes'] = self.job_max_running_time_minutes - if self.third_party_lib_dir is not None: - result['ThirdPartyLibDir'] = self.third_party_lib_dir + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EnableWebservice') is not None: - self.enable_webservice = m.get('EnableWebservice') - if m.get('JobMaxRunningTimeMinutes') is not None: - self.job_max_running_time_minutes = m.get('JobMaxRunningTimeMinutes') - if m.get('ThirdPartyLibDir') is not None: - self.third_party_lib_dir = m.get('ThirdPartyLibDir') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class BuildLLMSnapshotRequestWorkloadResourceSpecResourceConfig(TeaModel): +class CheckInstanceWebTerminalResponse(TeaModel): def __init__( self, - cpu: int = None, - gpu: int = None, - memory_in_gi_b: int = None, - resource_group: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: CheckInstanceWebTerminalResponseBody = None, ): - self.cpu = cpu - self.gpu = gpu - self.memory_in_gi_b = memory_in_gi_b - self.resource_group = resource_group + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -3687,43 +4180,41 @@ def to_map(self): return _map result = dict() - if self.cpu is not None: - result['Cpu'] = self.cpu - if self.gpu is not None: - result['Gpu'] = self.gpu 
- if self.memory_in_gi_b is not None: - result['MemoryInGiB'] = self.memory_in_gi_b - if self.resource_group is not None: - result['ResourceGroup'] = self.resource_group + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Cpu') is not None: - self.cpu = m.get('Cpu') - if m.get('Gpu') is not None: - self.gpu = m.get('Gpu') - if m.get('MemoryInGiB') is not None: - self.memory_in_gi_b = m.get('MemoryInGiB') - if m.get('ResourceGroup') is not None: - self.resource_group = m.get('ResourceGroup') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = CheckInstanceWebTerminalResponseBody() + self.body = temp_model.from_map(m['body']) return self -class BuildLLMSnapshotRequestWorkloadResourceSpec(TeaModel): +class CreateAI4DDefaultBucketResponseBody(TeaModel): def __init__( self, - ecs_spec: str = None, - instance_num: int = None, - resource_config: BuildLLMSnapshotRequestWorkloadResourceSpecResourceConfig = None, + extranet_endpoint: str = None, + intranet_endpoint: str = None, + name: str = None, + request_id: str = None, ): - self.ecs_spec = ecs_spec - self.instance_num = instance_num - self.resource_config = resource_config + self.extranet_endpoint = extranet_endpoint + self.intranet_endpoint = intranet_endpoint + self.name = name + self.request_id = request_id def validate(self): - if self.resource_config: - self.resource_config.validate() + pass def to_map(self): _map = super().to_map() @@ -3731,43 +4222,43 @@ def to_map(self): return _map result = dict() - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.instance_num is not None: - result['InstanceNum'] = 
self.instance_num - if self.resource_config is not None: - result['ResourceConfig'] = self.resource_config.to_map() + if self.extranet_endpoint is not None: + result['ExtranetEndpoint'] = self.extranet_endpoint + if self.intranet_endpoint is not None: + result['IntranetEndpoint'] = self.intranet_endpoint + if self.name is not None: + result['Name'] = self.name + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('InstanceNum') is not None: - self.instance_num = m.get('InstanceNum') - if m.get('ResourceConfig') is not None: - temp_model = BuildLLMSnapshotRequestWorkloadResourceSpecResourceConfig() - self.resource_config = temp_model.from_map(m['ResourceConfig']) + if m.get('ExtranetEndpoint') is not None: + self.extranet_endpoint = m.get('ExtranetEndpoint') + if m.get('IntranetEndpoint') is not None: + self.intranet_endpoint = m.get('IntranetEndpoint') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class BuildLLMSnapshotRequestWorkloadUserVpc(TeaModel): +class CreateAI4DDefaultBucketResponse(TeaModel): def __init__( self, - default_route: str = None, - extended_cidrs: List[str] = None, - security_group_id: str = None, - switch_id: str = None, - vpc_id: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: CreateAI4DDefaultBucketResponseBody = None, ): - self.default_route = default_route - self.extended_cidrs = extended_cidrs - self.security_group_id = security_group_id - self.switch_id = switch_id - self.vpc_id = vpc_id + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -3775,55 +4266,41 @@ def to_map(self): return _map result = dict() - 
if self.default_route is not None: - result['DefaultRoute'] = self.default_route - if self.extended_cidrs is not None: - result['ExtendedCIDRs'] = self.extended_cidrs - if self.security_group_id is not None: - result['SecurityGroupId'] = self.security_group_id - if self.switch_id is not None: - result['SwitchId'] = self.switch_id - if self.vpc_id is not None: - result['VpcId'] = self.vpc_id + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('DefaultRoute') is not None: - self.default_route = m.get('DefaultRoute') - if m.get('ExtendedCIDRs') is not None: - self.extended_cidrs = m.get('ExtendedCIDRs') - if m.get('SecurityGroupId') is not None: - self.security_group_id = m.get('SecurityGroupId') - if m.get('SwitchId') is not None: - self.switch_id = m.get('SwitchId') - if m.get('VpcId') is not None: - self.vpc_id = m.get('VpcId') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = CreateAI4DDefaultBucketResponseBody() + self.body = temp_model.from_map(m['body']) return self -class BuildLLMSnapshotRequestWorkload(TeaModel): +class CreateAI4DSerivceRequest(TeaModel): def __init__( self, - container: BuildLLMSnapshotRequestWorkloadContainer = None, - extra_config: BuildLLMSnapshotRequestWorkloadExtraConfig = None, - resource_spec: BuildLLMSnapshotRequestWorkloadResourceSpec = None, - user_vpc: BuildLLMSnapshotRequestWorkloadUserVpc = None, + inference_spec: Dict[str, Any] = None, + service_type: str = None, + workspace_id: str = None, ): - self.container = container - self.extra_config = extra_config - self.resource_spec = resource_spec - self.user_vpc = user_vpc + self.inference_spec = 
inference_spec + # This parameter is required. + self.service_type = service_type + # This parameter is required. + self.workspace_id = workspace_id def validate(self): - if self.container: - self.container.validate() - if self.extra_config: - self.extra_config.validate() - if self.resource_spec: - self.resource_spec.validate() - if self.user_vpc: - self.user_vpc.validate() + pass def to_map(self): _map = super().to_map() @@ -3831,49 +4308,36 @@ def to_map(self): return _map result = dict() - if self.container is not None: - result['Container'] = self.container.to_map() - if self.extra_config is not None: - result['ExtraConfig'] = self.extra_config.to_map() - if self.resource_spec is not None: - result['ResourceSpec'] = self.resource_spec.to_map() - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() + if self.inference_spec is not None: + result['InferenceSpec'] = self.inference_spec + if self.service_type is not None: + result['ServiceType'] = self.service_type + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Container') is not None: - temp_model = BuildLLMSnapshotRequestWorkloadContainer() - self.container = temp_model.from_map(m['Container']) - if m.get('ExtraConfig') is not None: - temp_model = BuildLLMSnapshotRequestWorkloadExtraConfig() - self.extra_config = temp_model.from_map(m['ExtraConfig']) - if m.get('ResourceSpec') is not None: - temp_model = BuildLLMSnapshotRequestWorkloadResourceSpec() - self.resource_spec = temp_model.from_map(m['ResourceSpec']) - if m.get('UserVpc') is not None: - temp_model = BuildLLMSnapshotRequestWorkloadUserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('InferenceSpec') is not None: + self.inference_spec = m.get('InferenceSpec') + if m.get('ServiceType') is not None: + self.service_type = m.get('ServiceType') + if m.get('WorkspaceId') is not None: + self.workspace_id = 
m.get('WorkspaceId') return self -class BuildLLMSnapshotRequest(TeaModel): +class CreateAI4DSerivceResponseBody(TeaModel): def __init__( self, - description: str = None, - display_name: str = None, - labels: Dict[str, Any] = None, - workload: BuildLLMSnapshotRequestWorkload = None, + request_id: str = None, + service_name: str = None, ): - self.description = description - self.display_name = display_name - self.labels = labels - self.workload = workload + self.request_id = request_id + self.service_name = service_name def validate(self): - if self.workload: - self.workload.validate() + pass def to_map(self): _map = super().to_map() @@ -3881,87 +4345,27 @@ def to_map(self): return _map result = dict() - if self.description is not None: - result['Description'] = self.description - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.labels is not None: - result['Labels'] = self.labels - if self.workload is not None: - result['Workload'] = self.workload.to_map() + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.service_name is not None: + result['ServiceName'] = self.service_name return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Description') is not None: - self.description = m.get('Description') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('Labels') is not None: - self.labels = m.get('Labels') - if m.get('Workload') is not None: - temp_model = BuildLLMSnapshotRequestWorkload() - self.workload = temp_model.from_map(m['Workload']) - return self - - -class BuildLLMSnapshotResponseBody(TeaModel): - def __init__( - self, - job_id: str = None, - job_name: str = None, - job_request_id: str = None, - request_id: str = None, - status: str = None, - ): - self.job_id = job_id - self.job_name = job_name - self.job_request_id = job_request_id - self.request_id = request_id - self.status = status - - def validate(self): - pass - - def 
to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.job_id is not None: - result['JobId'] = self.job_id - if self.job_name is not None: - result['JobName'] = self.job_name - if self.job_request_id is not None: - result['JobRequestId'] = self.job_request_id - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.status is not None: - result['Status'] = self.status - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('JobId') is not None: - self.job_id = m.get('JobId') - if m.get('JobName') is not None: - self.job_name = m.get('JobName') - if m.get('JobRequestId') is not None: - self.job_request_id = m.get('JobRequestId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Status') is not None: - self.status = m.get('Status') + if m.get('ServiceName') is not None: + self.service_name = m.get('ServiceName') return self -class BuildLLMSnapshotResponse(TeaModel): +class CreateAI4DSerivceResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: BuildLLMSnapshotResponseBody = None, + body: CreateAI4DSerivceResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -3992,17 +4396,23 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = BuildLLMSnapshotResponseBody() + temp_model = CreateAI4DSerivceResponseBody() self.body = temp_model.from_map(m['body']) return self -class CheckInstanceWebTerminalRequest(TeaModel): +class CreateAlgorithmRequest(TeaModel): def __init__( self, - check_info: str = None, + algorithm_description: str = None, + algorithm_name: str = None, + display_name: str = None, + workspace_id: str = None, ): - self.check_info = check_info + self.algorithm_description = algorithm_description + self.algorithm_name = algorithm_name + 
self.display_name = display_name + self.workspace_id = workspace_id def validate(self): pass @@ -4013,22 +4423,36 @@ def to_map(self): return _map result = dict() - if self.check_info is not None: - result['CheckInfo'] = self.check_info + if self.algorithm_description is not None: + result['AlgorithmDescription'] = self.algorithm_description + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.display_name is not None: + result['DisplayName'] = self.display_name + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('CheckInfo') is not None: - self.check_info = m.get('CheckInfo') + if m.get('AlgorithmDescription') is not None: + self.algorithm_description = m.get('AlgorithmDescription') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class CheckInstanceWebTerminalResponseBody(TeaModel): +class CreateAlgorithmResponseBody(TeaModel): def __init__( self, + algorithm_id: str = None, request_id: str = None, ): + self.algorithm_id = algorithm_id self.request_id = request_id def validate(self): @@ -4040,23 +4464,27 @@ def to_map(self): return _map result = dict() + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class CheckInstanceWebTerminalResponse(TeaModel): +class CreateAlgorithmResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: 
CheckInstanceWebTerminalResponseBody = None, + body: CreateAlgorithmResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4087,23 +4515,46 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CheckInstanceWebTerminalResponseBody() + temp_model = CreateAlgorithmResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateAI4DDefaultBucketResponseBody(TeaModel): +class CreateAlgorithmVersionRequest(TeaModel): def __init__( self, - extranet_endpoint: str = None, - intranet_endpoint: str = None, - name: str = None, - request_id: str = None, + algorithm_spec: AlgorithmSpec = None, ): - self.extranet_endpoint = extranet_endpoint - self.intranet_endpoint = intranet_endpoint - self.name = name - self.request_id = request_id + self.algorithm_spec = algorithm_spec + + def validate(self): + if self.algorithm_spec: + self.algorithm_spec.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.algorithm_spec is not None: + result['AlgorithmSpec'] = self.algorithm_spec.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('AlgorithmSpec') is not None: + temp_model = AlgorithmSpec() + self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) + return self + + +class CreateAlgorithmVersionShrinkRequest(TeaModel): + def __init__( + self, + algorithm_spec_shrink: str = None, + ): + self.algorithm_spec_shrink = algorithm_spec_shrink def validate(self): pass @@ -4114,35 +4565,56 @@ def to_map(self): return _map result = dict() - if self.extranet_endpoint is not None: - result['ExtranetEndpoint'] = self.extranet_endpoint - if self.intranet_endpoint is not None: - result['IntranetEndpoint'] = self.intranet_endpoint - if self.name is not None: - result['Name'] = self.name - if self.request_id is not None: - 
result['RequestId'] = self.request_id + if self.algorithm_spec_shrink is not None: + result['AlgorithmSpec'] = self.algorithm_spec_shrink return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ExtranetEndpoint') is not None: - self.extranet_endpoint = m.get('ExtranetEndpoint') - if m.get('IntranetEndpoint') is not None: - self.intranet_endpoint = m.get('IntranetEndpoint') - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') + if m.get('AlgorithmSpec') is not None: + self.algorithm_spec_shrink = m.get('AlgorithmSpec') return self -class CreateAI4DDefaultBucketResponse(TeaModel): +class CreateAlgorithmVersionResponseBody(TeaModel): + def __init__( + self, + algorithm_id: str = None, + algorithm_version: str = None, + ): + self.algorithm_id = algorithm_id + self.algorithm_version = algorithm_version + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_version is not None: + result['AlgorithmVersion'] = self.algorithm_version + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmVersion') is not None: + self.algorithm_version = m.get('AlgorithmVersion') + return self + + +class CreateAlgorithmVersionResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateAI4DDefaultBucketResponseBody = None, + body: CreateAlgorithmVersionResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4173,24 +4645,34 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateAI4DDefaultBucketResponseBody() 
+ temp_model = CreateAlgorithmVersionResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateAI4DSerivceRequest(TeaModel): +class CreateComponentRequest(TeaModel): def __init__( self, - inference_spec: Dict[str, Any] = None, - service_type: str = None, + description: str = None, + display_name: str = None, + labels: List[Label] = None, + name: str = None, workspace_id: str = None, ): - self.inference_spec = inference_spec - self.service_type = service_type + # This parameter is required. + self.description = description + self.display_name = display_name + self.labels = labels + # This parameter is required. + self.name = name + # This parameter is required. self.workspace_id = workspace_id def validate(self): - pass + if self.labels: + for k in self.labels: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -4198,33 +4680,46 @@ def to_map(self): return _map result = dict() - if self.inference_spec is not None: - result['InferenceSpec'] = self.inference_spec - if self.service_type is not None: - result['ServiceType'] = self.service_type + if self.description is not None: + result['Description'] = self.description + if self.display_name is not None: + result['DisplayName'] = self.display_name + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.name is not None: + result['Name'] = self.name if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('InferenceSpec') is not None: - self.inference_spec = m.get('InferenceSpec') - if m.get('ServiceType') is not None: - self.service_type = m.get('ServiceType') + if m.get('Description') is not None: + self.description = m.get('Description') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): 
+ temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('Name') is not None: + self.name = m.get('Name') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class CreateAI4DSerivceResponseBody(TeaModel): +class CreateComponentResponseBody(TeaModel): def __init__( self, + component_id: str = None, request_id: str = None, - service_name: str = None, ): + self.component_id = component_id self.request_id = request_id - self.service_name = service_name def validate(self): pass @@ -4235,27 +4730,27 @@ def to_map(self): return _map result = dict() + if self.component_id is not None: + result['ComponentId'] = self.component_id if self.request_id is not None: result['RequestId'] = self.request_id - if self.service_name is not None: - result['ServiceName'] = self.service_name return result def from_map(self, m: dict = None): m = m or dict() + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('ServiceName') is not None: - self.service_name = m.get('ServiceName') return self -class CreateAI4DSerivceResponse(TeaModel): +class CreateComponentResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateAI4DSerivceResponseBody = None, + body: CreateComponentResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4286,23 +4781,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateAI4DSerivceResponseBody() + temp_model = CreateComponentResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateAlgorithmRequest(TeaModel): +class CreateComponentVersionRequestLabels(TeaModel): def __init__( self, - algorithm_description: str = None, - algorithm_name: str = None, - display_name: str = None, - 
workspace_id: str = None, + key: str = None, + value: str = None, ): - self.algorithm_description = algorithm_description - self.algorithm_name = algorithm_name - self.display_name = display_name - self.workspace_id = workspace_id + self.key = key + self.value = value def validate(self): pass @@ -4313,36 +4804,93 @@ def to_map(self): return _map result = dict() - if self.algorithm_description is not None: - result['AlgorithmDescription'] = self.algorithm_description - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.key is not None: + result['Key'] = self.key + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmDescription') is not None: - self.algorithm_description = m.get('AlgorithmDescription') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Key') is not None: + self.key = m.get('Key') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class CreateAlgorithmResponseBody(TeaModel): +class CreateComponentVersionRequest(TeaModel): def __init__( self, - algorithm_id: str = None, + config_dir: Location = None, + description: str = None, + labels: List[CreateComponentVersionRequestLabels] = None, + spec: ComponentSpec = None, + version: str = None, + ): + self.config_dir = config_dir + self.description = description + self.labels = labels + self.spec = spec + self.version = version + + def validate(self): + if self.config_dir: + self.config_dir.validate() + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.spec: + 
self.spec.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.config_dir is not None: + result['ConfigDir'] = self.config_dir.to_map() + if self.description is not None: + result['Description'] = self.description + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.spec is not None: + result['Spec'] = self.spec.to_map() + if self.version is not None: + result['Version'] = self.version + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('ConfigDir') is not None: + temp_model = Location() + self.config_dir = temp_model.from_map(m['ConfigDir']) + if m.get('Description') is not None: + self.description = m.get('Description') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = CreateComponentVersionRequestLabels() + self.labels.append(temp_model.from_map(k)) + if m.get('Spec') is not None: + temp_model = ComponentSpec() + self.spec = temp_model.from_map(m['Spec']) + if m.get('Version') is not None: + self.version = m.get('Version') + return self + + +class CreateComponentVersionResponseBody(TeaModel): + def __init__( + self, + instance_job_id: str = None, request_id: str = None, ): - self.algorithm_id = algorithm_id + self.instance_job_id = instance_job_id self.request_id = request_id def validate(self): @@ -4354,27 +4902,27 @@ def to_map(self): return _map result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id + if self.instance_job_id is not None: + result['InstanceJobId'] = self.instance_job_id if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') + if m.get('InstanceJobId') is not None: + self.instance_job_id = 
m.get('InstanceJobId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class CreateAlgorithmResponse(TeaModel): +class CreateComponentVersionResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateAlgorithmResponseBody = None, + body: CreateComponentVersionResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4405,75 +4953,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateAlgorithmResponseBody() + temp_model = CreateComponentVersionResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateAlgorithmVersionRequest(TeaModel): - def __init__( - self, - algorithm_spec: AlgorithmSpec = None, - ): - self.algorithm_spec = algorithm_spec - - def validate(self): - if self.algorithm_spec: - self.algorithm_spec.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.algorithm_spec is not None: - result['AlgorithmSpec'] = self.algorithm_spec.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AlgorithmSpec') is not None: - temp_model = AlgorithmSpec() - self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) - return self - - -class CreateAlgorithmVersionShrinkRequest(TeaModel): - def __init__( - self, - algorithm_spec_shrink: str = None, - ): - self.algorithm_spec_shrink = algorithm_spec_shrink - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.algorithm_spec_shrink is not None: - result['AlgorithmSpec'] = self.algorithm_spec_shrink - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AlgorithmSpec') is not None: - self.algorithm_spec_shrink = m.get('AlgorithmSpec') - return 
self - - -class CreateAlgorithmVersionResponseBody(TeaModel): +class CreateInstanceWebTerminalResponseBody(TeaModel): def __init__( self, - algorithm_id: str = None, - algorithm_version: str = None, + request_id: str = None, + web_terminal_id: str = None, ): - self.algorithm_id = algorithm_id - self.algorithm_version = algorithm_version + self.request_id = request_id + self.web_terminal_id = web_terminal_id def validate(self): pass @@ -4484,27 +4976,27 @@ def to_map(self): return _map result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_version is not None: - result['AlgorithmVersion'] = self.algorithm_version + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.web_terminal_id is not None: + result['WebTerminalId'] = self.web_terminal_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmVersion') is not None: - self.algorithm_version = m.get('AlgorithmVersion') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('WebTerminalId') is not None: + self.web_terminal_id = m.get('WebTerminalId') return self -class CreateAlgorithmVersionResponse(TeaModel): +class CreateInstanceWebTerminalResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateAlgorithmVersionResponseBody = None, + body: CreateInstanceWebTerminalResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4535,31 +5027,45 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateAlgorithmVersionResponseBody() + temp_model = CreateInstanceWebTerminalResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateComponentRequest(TeaModel): +class 
CreateQuotaRequest(TeaModel): def __init__( self, + allocate_strategy: str = None, description: str = None, - display_name: str = None, labels: List[Label] = None, - name: str = None, - workspace_id: str = None, + min: ResourceSpec = None, + parent_quota_id: str = None, + queue_strategy: str = None, + quota_config: QuotaConfig = None, + quota_name: str = None, + resource_group_ids: List[str] = None, + resource_type: str = None, ): + self.allocate_strategy = allocate_strategy self.description = description - self.display_name = display_name self.labels = labels - self.name = name - self.workspace_id = workspace_id + self.min = min + self.parent_quota_id = parent_quota_id + self.queue_strategy = queue_strategy + self.quota_config = quota_config + self.quota_name = quota_name + self.resource_group_ids = resource_group_ids + self.resource_type = resource_type def validate(self): if self.labels: for k in self.labels: if k: k.validate() + if self.min: + self.min.validate() + if self.quota_config: + self.quota_config.validate() def to_map(self): _map = super().to_map() @@ -4567,45 +5073,68 @@ def to_map(self): return _map result = dict() + if self.allocate_strategy is not None: + result['AllocateStrategy'] = self.allocate_strategy if self.description is not None: result['Description'] = self.description - if self.display_name is not None: - result['DisplayName'] = self.display_name result['Labels'] = [] if self.labels is not None: for k in self.labels: result['Labels'].append(k.to_map() if k else None) - if self.name is not None: - result['Name'] = self.name - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Description') is not None: - self.description = m.get('Description') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() 
- self.labels.append(temp_model.from_map(k)) - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if self.min is not None: + result['Min'] = self.min.to_map() + if self.parent_quota_id is not None: + result['ParentQuotaId'] = self.parent_quota_id + if self.queue_strategy is not None: + result['QueueStrategy'] = self.queue_strategy + if self.quota_config is not None: + result['QuotaConfig'] = self.quota_config.to_map() + if self.quota_name is not None: + result['QuotaName'] = self.quota_name + if self.resource_group_ids is not None: + result['ResourceGroupIds'] = self.resource_group_ids + if self.resource_type is not None: + result['ResourceType'] = self.resource_type + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('AllocateStrategy') is not None: + self.allocate_strategy = m.get('AllocateStrategy') + if m.get('Description') is not None: + self.description = m.get('Description') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('Min') is not None: + temp_model = ResourceSpec() + self.min = temp_model.from_map(m['Min']) + if m.get('ParentQuotaId') is not None: + self.parent_quota_id = m.get('ParentQuotaId') + if m.get('QueueStrategy') is not None: + self.queue_strategy = m.get('QueueStrategy') + if m.get('QuotaConfig') is not None: + temp_model = QuotaConfig() + self.quota_config = temp_model.from_map(m['QuotaConfig']) + if m.get('QuotaName') is not None: + self.quota_name = m.get('QuotaName') + if m.get('ResourceGroupIds') is not None: + self.resource_group_ids = m.get('ResourceGroupIds') + if m.get('ResourceType') is not None: + self.resource_type = m.get('ResourceType') return self -class CreateComponentResponseBody(TeaModel): +class CreateQuotaResponseBody(TeaModel): def __init__( self, - component_id: str = None, + quota_id: str 
= None, request_id: str = None, ): - self.component_id = component_id + # Quota Id + self.quota_id = quota_id self.request_id = request_id def validate(self): @@ -4617,27 +5146,27 @@ def to_map(self): return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id + if self.quota_id is not None: + result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class CreateComponentResponse(TeaModel): +class CreateQuotaResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateComponentResponseBody = None, + body: CreateQuotaResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4668,12 +5197,12 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateComponentResponseBody() + temp_model = CreateQuotaResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateComponentVersionRequestLabels(TeaModel): +class CreateResourceGroupRequestTag(TeaModel): def __init__( self, key: str = None, @@ -4706,30 +5235,30 @@ def from_map(self, m: dict = None): return self -class CreateComponentVersionRequest(TeaModel): +class CreateResourceGroupRequest(TeaModel): def __init__( self, - config_dir: Location = None, + computing_resource_provider: str = None, description: str = None, - labels: List[CreateComponentVersionRequestLabels] = None, - spec: ComponentSpec = None, - version: str = None, + name: str = None, + resource_type: str = None, + tag: List[CreateResourceGroupRequestTag] = None, + 
user_vpc: UserVpc = None, ): - self.config_dir = config_dir + self.computing_resource_provider = computing_resource_provider self.description = description - self.labels = labels - self.spec = spec - self.version = version + self.name = name + self.resource_type = resource_type + self.tag = tag + self.user_vpc = user_vpc def validate(self): - if self.config_dir: - self.config_dir.validate() - if self.labels: - for k in self.labels: + if self.tag: + for k in self.tag: if k: k.validate() - if self.spec: - self.spec.validate() + if self.user_vpc: + self.user_vpc.validate() def to_map(self): _map = super().to_map() @@ -4737,48 +5266,51 @@ def to_map(self): return _map result = dict() - if self.config_dir is not None: - result['ConfigDir'] = self.config_dir.to_map() + if self.computing_resource_provider is not None: + result['ComputingResourceProvider'] = self.computing_resource_provider if self.description is not None: result['Description'] = self.description - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.spec is not None: - result['Spec'] = self.spec.to_map() - if self.version is not None: - result['Version'] = self.version + if self.name is not None: + result['Name'] = self.name + if self.resource_type is not None: + result['ResourceType'] = self.resource_type + result['Tag'] = [] + if self.tag is not None: + for k in self.tag: + result['Tag'].append(k.to_map() if k else None) + if self.user_vpc is not None: + result['UserVpc'] = self.user_vpc.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ConfigDir') is not None: - temp_model = Location() - self.config_dir = temp_model.from_map(m['ConfigDir']) + if m.get('ComputingResourceProvider') is not None: + self.computing_resource_provider = m.get('ComputingResourceProvider') if m.get('Description') is not None: self.description = m.get('Description') - self.labels = [] - if m.get('Labels') is 
not None: - for k in m.get('Labels'): - temp_model = CreateComponentVersionRequestLabels() - self.labels.append(temp_model.from_map(k)) - if m.get('Spec') is not None: - temp_model = ComponentSpec() - self.spec = temp_model.from_map(m['Spec']) - if m.get('Version') is not None: - self.version = m.get('Version') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('ResourceType') is not None: + self.resource_type = m.get('ResourceType') + self.tag = [] + if m.get('Tag') is not None: + for k in m.get('Tag'): + temp_model = CreateResourceGroupRequestTag() + self.tag.append(temp_model.from_map(k)) + if m.get('UserVpc') is not None: + temp_model = UserVpc() + self.user_vpc = temp_model.from_map(m['UserVpc']) return self -class CreateComponentVersionResponseBody(TeaModel): +class CreateResourceGroupResponseBody(TeaModel): def __init__( self, - instance_job_id: str = None, request_id: str = None, + resource_group_id: str = None, ): - self.instance_job_id = instance_job_id self.request_id = request_id + self.resource_group_id = resource_group_id def validate(self): pass @@ -4789,27 +5321,27 @@ def to_map(self): return _map result = dict() - if self.instance_job_id is not None: - result['InstanceJobId'] = self.instance_job_id if self.request_id is not None: result['RequestId'] = self.request_id + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('InstanceJobId') is not None: - self.instance_job_id = m.get('InstanceJobId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') return self -class CreateComponentVersionResponse(TeaModel): +class CreateResourceGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateComponentVersionResponseBody = None, + body: 
CreateResourceGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -4840,19 +5372,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateComponentVersionResponseBody() + temp_model = CreateResourceGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateInstanceWebTerminalResponseBody(TeaModel): +class CreateResourceGroupMachineGroupRequestTag(TeaModel): def __init__( self, - request_id: str = None, - web_terminal_id: str = None, + key: str = None, + value: str = None, ): - self.request_id = request_id - self.web_terminal_id = web_terminal_id + self.key = key + self.value = value def validate(self): pass @@ -4863,35 +5395,45 @@ def to_map(self): return _map result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.web_terminal_id is not None: - result['WebTerminalId'] = self.web_terminal_id + if self.key is not None: + result['Key'] = self.key + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('WebTerminalId') is not None: - self.web_terminal_id = m.get('WebTerminalId') + if m.get('Key') is not None: + self.key = m.get('Key') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class CreateInstanceWebTerminalResponse(TeaModel): +class CreateResourceGroupMachineGroupRequest(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateInstanceWebTerminalResponseBody = None, + ecs_count: int = None, + ecs_spec: str = None, + name: str = None, + payment_duration: str = None, + payment_duration_unit: str = None, + payment_type: str = None, + tag: List[CreateResourceGroupMachineGroupRequestTag] = None, ): - self.headers = headers 
- self.status_code = status_code - self.body = body + self.ecs_count = ecs_count + self.ecs_spec = ecs_spec + self.name = name + self.payment_duration = payment_duration + self.payment_duration_unit = payment_duration_unit + self.payment_type = payment_type + self.tag = tag def validate(self): - if self.body: - self.body.validate() + if self.tag: + for k in self.tag: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -4899,173 +5441,53 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.ecs_count is not None: + result['EcsCount'] = self.ecs_count + if self.ecs_spec is not None: + result['EcsSpec'] = self.ecs_spec + if self.name is not None: + result['Name'] = self.name + if self.payment_duration is not None: + result['PaymentDuration'] = self.payment_duration + if self.payment_duration_unit is not None: + result['PaymentDurationUnit'] = self.payment_duration_unit + if self.payment_type is not None: + result['PaymentType'] = self.payment_type + result['Tag'] = [] + if self.tag is not None: + for k in self.tag: + result['Tag'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateInstanceWebTerminalResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class CreateLLMProjectRequestLabels(TeaModel): - def __init__( - self, - key: str = None, - value: str = None, - ): - self.key = key - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.key is not None: - 
result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class CreateLLMProjectRequestRuntime(TeaModel): - def __init__( - self, - runtime_id: str = None, - runtime_type: str = None, - ): - self.runtime_id = runtime_id - self.runtime_type = runtime_type - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.runtime_id is not None: - result['RuntimeId'] = self.runtime_id - if self.runtime_type is not None: - result['RuntimeType'] = self.runtime_type - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('RuntimeId') is not None: - self.runtime_id = m.get('RuntimeId') - if m.get('RuntimeType') is not None: - self.runtime_type = m.get('RuntimeType') - return self - - -class CreateLLMProjectRequest(TeaModel): - def __init__( - self, - labels: List[CreateLLMProjectRequestLabels] = None, - project_description: str = None, - project_name: str = None, - project_type: str = None, - root_path: str = None, - runtime: CreateLLMProjectRequestRuntime = None, - workspace_id: str = None, - ): - self.labels = labels - self.project_description = project_description - self.project_name = project_name - self.project_type = project_type - self.root_path = root_path - self.runtime = runtime - self.workspace_id = workspace_id - - def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.runtime: - self.runtime.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.project_description is not None: - 
result['ProjectDescription'] = self.project_description - if self.project_name is not None: - result['ProjectName'] = self.project_name - if self.project_type is not None: - result['ProjectType'] = self.project_type - if self.root_path is not None: - result['RootPath'] = self.root_path - if self.runtime is not None: - result['Runtime'] = self.runtime.to_map() - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = CreateLLMProjectRequestLabels() - self.labels.append(temp_model.from_map(k)) - if m.get('ProjectDescription') is not None: - self.project_description = m.get('ProjectDescription') - if m.get('ProjectName') is not None: - self.project_name = m.get('ProjectName') - if m.get('ProjectType') is not None: - self.project_type = m.get('ProjectType') - if m.get('RootPath') is not None: - self.root_path = m.get('RootPath') - if m.get('Runtime') is not None: - temp_model = CreateLLMProjectRequestRuntime() - self.runtime = temp_model.from_map(m['Runtime']) - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('EcsCount') is not None: + self.ecs_count = m.get('EcsCount') + if m.get('EcsSpec') is not None: + self.ecs_spec = m.get('EcsSpec') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('PaymentDuration') is not None: + self.payment_duration = m.get('PaymentDuration') + if m.get('PaymentDurationUnit') is not None: + self.payment_duration_unit = m.get('PaymentDurationUnit') + if m.get('PaymentType') is not None: + self.payment_type = m.get('PaymentType') + self.tag = [] + if m.get('Tag') is not None: + for k in m.get('Tag'): + temp_model = CreateResourceGroupMachineGroupRequestTag() + self.tag.append(temp_model.from_map(k)) return self -class CreateLLMProjectResponseBody(TeaModel): +class 
CreateResourceGroupMachineGroupResponseBody(TeaModel): def __init__( self, - project_id: str = None, + machine_group_id: str = None, request_id: str = None, ): - self.project_id = project_id + self.machine_group_id = machine_group_id self.request_id = request_id def validate(self): @@ -5077,27 +5499,27 @@ def to_map(self): return _map result = dict() - if self.project_id is not None: - result['ProjectId'] = self.project_id + if self.machine_group_id is not None: + result['MachineGroupID'] = self.machine_group_id if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') + if m.get('MachineGroupID') is not None: + self.machine_group_id = m.get('MachineGroupID') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class CreateLLMProjectResponse(TeaModel): +class CreateResourceGroupMachineGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateLLMProjectResponseBody = None, + body: CreateResourceGroupMachineGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -5128,12 +5550,12 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateLLMProjectResponseBody() + temp_model = CreateResourceGroupMachineGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateLLMServiceIdentityRoleRequest(TeaModel): +class CreateServiceIdentityRoleRequest(TeaModel): def __init__( self, role_name: str = None, @@ -5160,7 +5582,7 @@ def from_map(self, m: dict = None): return self -class CreateLLMServiceIdentityRoleResponseBody(TeaModel): +class CreateServiceIdentityRoleResponseBody(TeaModel): def __init__( self, request_id: str = None, @@ -5193,12 +5615,12 @@ def from_map(self, m: 
dict = None): return self -class CreateLLMServiceIdentityRoleResponse(TeaModel): +class CreateServiceIdentityRoleResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateLLMServiceIdentityRoleResponseBody = None, + body: CreateServiceIdentityRoleResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -5229,19 +5651,25 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateLLMServiceIdentityRoleResponseBody() + temp_model = CreateServiceIdentityRoleResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateLLMSnapshotRequestStorage(TeaModel): +class CreateTrainingJobRequestComputeResourceInstanceSpec(TeaModel): def __init__( self, - location: str = None, - type: str = None, + cpu: str = None, + gpu: str = None, + gputype: str = None, + memory: str = None, + shared_memory: str = None, ): - self.location = location - self.type = type + self.cpu = cpu + self.gpu = gpu + self.gputype = gputype + self.memory = memory + self.shared_memory = shared_memory def validate(self): pass @@ -5252,31 +5680,44 @@ def to_map(self): return _map result = dict() - if self.location is not None: - result['Location'] = self.location - if self.type is not None: - result['Type'] = self.type + if self.cpu is not None: + result['CPU'] = self.cpu + if self.gpu is not None: + result['GPU'] = self.gpu + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.memory is not None: + result['Memory'] = self.memory + if self.shared_memory is not None: + result['SharedMemory'] = self.shared_memory return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Location') is not None: - self.location = m.get('Location') - if m.get('Type') is not None: - self.type = m.get('Type') + if m.get('CPU') is not None: + self.cpu = m.get('CPU') + if m.get('GPU') is not 
None: + self.gpu = m.get('GPU') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('Memory') is not None: + self.memory = m.get('Memory') + if m.get('SharedMemory') is not None: + self.shared_memory = m.get('SharedMemory') return self -class CreateLLMSnapshotRequest(TeaModel): +class CreateTrainingJobRequestComputeResourceSpotSpec(TeaModel): def __init__( self, - storage: CreateLLMSnapshotRequestStorage = None, + spot_discount_limit: float = None, + spot_strategy: str = None, ): - self.storage = storage + self.spot_discount_limit = spot_discount_limit + self.spot_strategy = spot_strategy def validate(self): - if self.storage: - self.storage.validate() + pass def to_map(self): _map = super().to_map() @@ -5284,31 +5725,45 @@ def to_map(self): return _map result = dict() - if self.storage is not None: - result['Storage'] = self.storage.to_map() + if self.spot_discount_limit is not None: + result['SpotDiscountLimit'] = self.spot_discount_limit + if self.spot_strategy is not None: + result['SpotStrategy'] = self.spot_strategy return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Storage') is not None: - temp_model = CreateLLMSnapshotRequestStorage() - self.storage = temp_model.from_map(m['Storage']) + if m.get('SpotDiscountLimit') is not None: + self.spot_discount_limit = m.get('SpotDiscountLimit') + if m.get('SpotStrategy') is not None: + self.spot_strategy = m.get('SpotStrategy') return self -class CreateLLMSnapshotResponseBody(TeaModel): +class CreateTrainingJobRequestComputeResource(TeaModel): def __init__( self, - pipeline_run_id: str = None, - request_id: str = None, - snapshot_id: str = None, + ecs_count: int = None, + ecs_spec: str = None, + instance_count: int = None, + instance_spec: CreateTrainingJobRequestComputeResourceInstanceSpec = None, + resource_id: str = None, + spot_spec: CreateTrainingJobRequestComputeResourceSpotSpec = None, + use_spot_instance: bool = None, ): - self.pipeline_run_id = 
pipeline_run_id - self.request_id = request_id - self.snapshot_id = snapshot_id + self.ecs_count = ecs_count + self.ecs_spec = ecs_spec + self.instance_count = instance_count + self.instance_spec = instance_spec + self.resource_id = resource_id + self.spot_spec = spot_spec + self.use_spot_instance = use_spot_instance def validate(self): - pass + if self.instance_spec: + self.instance_spec.validate() + if self.spot_spec: + self.spot_spec.validate() def to_map(self): _map = super().to_map() @@ -5316,39 +5771,52 @@ def to_map(self): return _map result = dict() - if self.pipeline_run_id is not None: - result['PipelineRunId'] = self.pipeline_run_id - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id + if self.ecs_count is not None: + result['EcsCount'] = self.ecs_count + if self.ecs_spec is not None: + result['EcsSpec'] = self.ecs_spec + if self.instance_count is not None: + result['InstanceCount'] = self.instance_count + if self.instance_spec is not None: + result['InstanceSpec'] = self.instance_spec.to_map() + if self.resource_id is not None: + result['ResourceId'] = self.resource_id + if self.spot_spec is not None: + result['SpotSpec'] = self.spot_spec.to_map() + if self.use_spot_instance is not None: + result['UseSpotInstance'] = self.use_spot_instance return result def from_map(self, m: dict = None): m = m or dict() - if m.get('PipelineRunId') is not None: - self.pipeline_run_id = m.get('PipelineRunId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') + if m.get('EcsCount') is not None: + self.ecs_count = m.get('EcsCount') + if m.get('EcsSpec') is not None: + self.ecs_spec = m.get('EcsSpec') + if m.get('InstanceCount') is not None: + self.instance_count = m.get('InstanceCount') + if m.get('InstanceSpec') is not None: + temp_model = 
CreateTrainingJobRequestComputeResourceInstanceSpec() + self.instance_spec = temp_model.from_map(m['InstanceSpec']) + if m.get('ResourceId') is not None: + self.resource_id = m.get('ResourceId') + if m.get('SpotSpec') is not None: + temp_model = CreateTrainingJobRequestComputeResourceSpotSpec() + self.spot_spec = temp_model.from_map(m['SpotSpec']) + if m.get('UseSpotInstance') is not None: + self.use_spot_instance = m.get('UseSpotInstance') return self -class CreateLLMSnapshotResponse(TeaModel): +class CreateTrainingJobRequestExperimentConfig(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateLLMSnapshotResponseBody = None, + experiment_id: str = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.experiment_id = experiment_id def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -5356,130 +5824,25 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.experiment_id is not None: + result['ExperimentId'] = self.experiment_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateLLMSnapshotResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('ExperimentId') is not None: + self.experiment_id = m.get('ExperimentId') return self -class CreateQuotaRequest(TeaModel): +class CreateTrainingJobRequestHyperParameters(TeaModel): def __init__( self, - allocate_strategy: str = None, - description: str = None, - labels: List[Label] = None, - min: ResourceSpec = None, - parent_quota_id: str = 
None, - queue_strategy: str = None, - quota_config: QuotaConfig = None, - quota_name: str = None, - resource_group_ids: List[str] = None, - resource_type: str = None, + name: str = None, + value: str = None, ): - self.allocate_strategy = allocate_strategy - self.description = description - self.labels = labels - self.min = min - self.parent_quota_id = parent_quota_id - self.queue_strategy = queue_strategy - self.quota_config = quota_config - self.quota_name = quota_name - self.resource_group_ids = resource_group_ids - self.resource_type = resource_type - - def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.min: - self.min.validate() - if self.quota_config: - self.quota_config.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.allocate_strategy is not None: - result['AllocateStrategy'] = self.allocate_strategy - if self.description is not None: - result['Description'] = self.description - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.min is not None: - result['Min'] = self.min.to_map() - if self.parent_quota_id is not None: - result['ParentQuotaId'] = self.parent_quota_id - if self.queue_strategy is not None: - result['QueueStrategy'] = self.queue_strategy - if self.quota_config is not None: - result['QuotaConfig'] = self.quota_config.to_map() - if self.quota_name is not None: - result['QuotaName'] = self.quota_name - if self.resource_group_ids is not None: - result['ResourceGroupIds'] = self.resource_group_ids - if self.resource_type is not None: - result['ResourceType'] = self.resource_type - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AllocateStrategy') is not None: - self.allocate_strategy = m.get('AllocateStrategy') - if m.get('Description') is not None: - self.description = m.get('Description') - self.labels = [] 
- if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('Min') is not None: - temp_model = ResourceSpec() - self.min = temp_model.from_map(m['Min']) - if m.get('ParentQuotaId') is not None: - self.parent_quota_id = m.get('ParentQuotaId') - if m.get('QueueStrategy') is not None: - self.queue_strategy = m.get('QueueStrategy') - if m.get('QuotaConfig') is not None: - temp_model = QuotaConfig() - self.quota_config = temp_model.from_map(m['QuotaConfig']) - if m.get('QuotaName') is not None: - self.quota_name = m.get('QuotaName') - if m.get('ResourceGroupIds') is not None: - self.resource_group_ids = m.get('ResourceGroupIds') - if m.get('ResourceType') is not None: - self.resource_type = m.get('ResourceType') - return self - - -class CreateQuotaResponseBody(TeaModel): - def __init__( - self, - quota_id: str = None, - request_id: str = None, - ): - # Quota Id - self.quota_id = quota_id - self.request_id = request_id + self.name = name + self.value = value def validate(self): pass @@ -5490,35 +5853,34 @@ def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - if self.request_id is not None: - result['RequestId'] = self.request_id + if self.name is not None: + result['Name'] = self.name + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class CreateQuotaResponse(TeaModel): +class CreateTrainingJobRequestInputChannels(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateQuotaResponseBody = None, + dataset_id: str = None, + 
input_uri: str = None, + name: str = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.dataset_id = dataset_id + self.input_uri = input_uri + self.name = name def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -5526,27 +5888,26 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.dataset_id is not None: + result['DatasetId'] = self.dataset_id + if self.input_uri is not None: + result['InputUri'] = self.input_uri + if self.name is not None: + result['Name'] = self.name return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateQuotaResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('DatasetId') is not None: + self.dataset_id = m.get('DatasetId') + if m.get('InputUri') is not None: + self.input_uri = m.get('InputUri') + if m.get('Name') is not None: + self.name = m.get('Name') return self -class CreateResourceGroupRequestTag(TeaModel): +class CreateTrainingJobRequestLabels(TeaModel): def __init__( self, key: str = None, @@ -5579,30 +5940,19 @@ def from_map(self, m: dict = None): return self -class CreateResourceGroupRequest(TeaModel): +class CreateTrainingJobRequestOutputChannels(TeaModel): def __init__( self, - computing_resource_provider: str = None, - description: str = None, + dataset_id: str = None, name: str = None, - resource_type: str = None, - tag: List[CreateResourceGroupRequestTag] = None, - user_vpc: UserVpc = None, + output_uri: str = None, ): - self.computing_resource_provider = computing_resource_provider - 
self.description = description + self.dataset_id = dataset_id self.name = name - self.resource_type = resource_type - self.tag = tag - self.user_vpc = user_vpc + self.output_uri = output_uri def validate(self): - if self.tag: - for k in self.tag: - if k: - k.validate() - if self.user_vpc: - self.user_vpc.validate() + pass def to_map(self): _map = super().to_map() @@ -5610,51 +5960,31 @@ def to_map(self): return _map result = dict() - if self.computing_resource_provider is not None: - result['ComputingResourceProvider'] = self.computing_resource_provider - if self.description is not None: - result['Description'] = self.description + if self.dataset_id is not None: + result['DatasetId'] = self.dataset_id if self.name is not None: result['Name'] = self.name - if self.resource_type is not None: - result['ResourceType'] = self.resource_type - result['Tag'] = [] - if self.tag is not None: - for k in self.tag: - result['Tag'].append(k.to_map() if k else None) - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() + if self.output_uri is not None: + result['OutputUri'] = self.output_uri return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComputingResourceProvider') is not None: - self.computing_resource_provider = m.get('ComputingResourceProvider') - if m.get('Description') is not None: - self.description = m.get('Description') + if m.get('DatasetId') is not None: + self.dataset_id = m.get('DatasetId') if m.get('Name') is not None: self.name = m.get('Name') - if m.get('ResourceType') is not None: - self.resource_type = m.get('ResourceType') - self.tag = [] - if m.get('Tag') is not None: - for k in m.get('Tag'): - temp_model = CreateResourceGroupRequestTag() - self.tag.append(temp_model.from_map(k)) - if m.get('UserVpc') is not None: - temp_model = UserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('OutputUri') is not None: + self.output_uri = m.get('OutputUri') return self -class 
CreateResourceGroupResponseBody(TeaModel): +class CreateTrainingJobRequestScheduler(TeaModel): def __init__( self, - request_id: str = None, - resource_group_id: str = None, + max_running_time_in_seconds: int = None, ): - self.request_id = request_id - self.resource_group_id = resource_group_id + self.max_running_time_in_seconds = max_running_time_in_seconds def validate(self): pass @@ -5665,35 +5995,32 @@ def to_map(self): return _map result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.resource_group_id is not None: - result['ResourceGroupID'] = self.resource_group_id + if self.max_running_time_in_seconds is not None: + result['MaxRunningTimeInSeconds'] = self.max_running_time_in_seconds return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('ResourceGroupID') is not None: - self.resource_group_id = m.get('ResourceGroupID') + if m.get('MaxRunningTimeInSeconds') is not None: + self.max_running_time_in_seconds = m.get('MaxRunningTimeInSeconds') return self -class CreateResourceGroupResponse(TeaModel): +class CreateTrainingJobRequestSettings(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: CreateResourceGroupResponseBody = None, + aimaster_type: str = None, + enable_error_monitoring_in_aimaster: bool = None, + error_monitoring_args: str = None, + priority: int = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.aimaster_type = aimaster_type + self.enable_error_monitoring_in_aimaster = enable_error_monitoring_in_aimaster + self.error_monitoring_args = error_monitoring_args + self.priority = priority def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -5701,34 +6028,43 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = 
self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.aimaster_type is not None: + result['AIMasterType'] = self.aimaster_type + if self.enable_error_monitoring_in_aimaster is not None: + result['EnableErrorMonitoringInAIMaster'] = self.enable_error_monitoring_in_aimaster + if self.error_monitoring_args is not None: + result['ErrorMonitoringArgs'] = self.error_monitoring_args + if self.priority is not None: + result['Priority'] = self.priority return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = CreateResourceGroupResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('AIMasterType') is not None: + self.aimaster_type = m.get('AIMasterType') + if m.get('EnableErrorMonitoringInAIMaster') is not None: + self.enable_error_monitoring_in_aimaster = m.get('EnableErrorMonitoringInAIMaster') + if m.get('ErrorMonitoringArgs') is not None: + self.error_monitoring_args = m.get('ErrorMonitoringArgs') + if m.get('Priority') is not None: + self.priority = m.get('Priority') return self -class CreateResourceGroupMachineGroupRequestTag(TeaModel): +class CreateTrainingJobRequestUserVpc(TeaModel): def __init__( self, - key: str = None, - value: str = None, + default_route: str = None, + extended_cidrs: List[str] = None, + security_group_id: str = None, + switch_id: str = None, + vpc_id: str = None, ): - self.key = key - self.value = value + self.default_route = default_route + self.extended_cidrs = extended_cidrs + self.security_group_id = security_group_id + self.switch_id = switch_id + self.vpc_id = vpc_id def validate(self): pass @@ -5739,45 +6075,113 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key 
- if self.value is not None: - result['Value'] = self.value + if self.default_route is not None: + result['DefaultRoute'] = self.default_route + if self.extended_cidrs is not None: + result['ExtendedCIDRs'] = self.extended_cidrs + if self.security_group_id is not None: + result['SecurityGroupId'] = self.security_group_id + if self.switch_id is not None: + result['SwitchId'] = self.switch_id + if self.vpc_id is not None: + result['VpcId'] = self.vpc_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('DefaultRoute') is not None: + self.default_route = m.get('DefaultRoute') + if m.get('ExtendedCIDRs') is not None: + self.extended_cidrs = m.get('ExtendedCIDRs') + if m.get('SecurityGroupId') is not None: + self.security_group_id = m.get('SecurityGroupId') + if m.get('SwitchId') is not None: + self.switch_id = m.get('SwitchId') + if m.get('VpcId') is not None: + self.vpc_id = m.get('VpcId') return self -class CreateResourceGroupMachineGroupRequest(TeaModel): +class CreateTrainingJobRequest(TeaModel): def __init__( self, - ecs_count: int = None, - ecs_spec: str = None, - name: str = None, - payment_duration: str = None, - payment_duration_unit: str = None, - payment_type: str = None, - tag: List[CreateResourceGroupMachineGroupRequestTag] = None, + algorithm_name: str = None, + algorithm_provider: str = None, + algorithm_spec: AlgorithmSpec = None, + algorithm_version: str = None, + code_dir: Location = None, + compute_resource: CreateTrainingJobRequestComputeResource = None, + environments: Dict[str, str] = None, + experiment_config: CreateTrainingJobRequestExperimentConfig = None, + hyper_parameters: List[CreateTrainingJobRequestHyperParameters] = None, + input_channels: List[CreateTrainingJobRequestInputChannels] = None, + labels: List[CreateTrainingJobRequestLabels] = None, + output_channels: 
List[CreateTrainingJobRequestOutputChannels] = None, + python_requirements: List[str] = None, + role_arn: str = None, + scheduler: CreateTrainingJobRequestScheduler = None, + settings: CreateTrainingJobRequestSettings = None, + training_job_description: str = None, + training_job_name: str = None, + user_vpc: CreateTrainingJobRequestUserVpc = None, + workspace_id: str = None, + resource_type: str = None, ): - self.ecs_count = ecs_count - self.ecs_spec = ecs_spec - self.name = name - self.payment_duration = payment_duration - self.payment_duration_unit = payment_duration_unit - self.payment_type = payment_type - self.tag = tag + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.algorithm_spec = algorithm_spec + self.algorithm_version = algorithm_version + self.code_dir = code_dir + self.compute_resource = compute_resource + self.environments = environments + self.experiment_config = experiment_config + self.hyper_parameters = hyper_parameters + self.input_channels = input_channels + self.labels = labels + self.output_channels = output_channels + self.python_requirements = python_requirements + self.role_arn = role_arn + self.scheduler = scheduler + self.settings = settings + self.training_job_description = training_job_description + # This parameter is required. + self.training_job_name = training_job_name + self.user_vpc = user_vpc + # This parameter is required. 
+ self.workspace_id = workspace_id + self.resource_type = resource_type def validate(self): - if self.tag: - for k in self.tag: + if self.algorithm_spec: + self.algorithm_spec.validate() + if self.code_dir: + self.code_dir.validate() + if self.compute_resource: + self.compute_resource.validate() + if self.experiment_config: + self.experiment_config.validate() + if self.hyper_parameters: + for k in self.hyper_parameters: if k: k.validate() + if self.input_channels: + for k in self.input_channels: + if k: + k.validate() + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.output_channels: + for k in self.output_channels: + if k: + k.validate() + if self.scheduler: + self.scheduler.validate() + if self.settings: + self.settings.validate() + if self.user_vpc: + self.user_vpc.validate() def to_map(self): _map = super().to_map() @@ -5785,85 +6189,163 @@ def to_map(self): return _map result = dict() - if self.ecs_count is not None: - result['EcsCount'] = self.ecs_count - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.name is not None: - result['Name'] = self.name - if self.payment_duration is not None: - result['PaymentDuration'] = self.payment_duration - if self.payment_duration_unit is not None: - result['PaymentDurationUnit'] = self.payment_duration_unit - if self.payment_type is not None: - result['PaymentType'] = self.payment_type - result['Tag'] = [] - if self.tag is not None: - for k in self.tag: - result['Tag'].append(k.to_map() if k else None) - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EcsCount') is not None: - self.ecs_count = m.get('EcsCount') - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('PaymentDuration') is not None: - self.payment_duration = m.get('PaymentDuration') - if m.get('PaymentDurationUnit') is not None: - self.payment_duration_unit = m.get('PaymentDurationUnit') 
- if m.get('PaymentType') is not None: - self.payment_type = m.get('PaymentType') - self.tag = [] - if m.get('Tag') is not None: - for k in m.get('Tag'): - temp_model = CreateResourceGroupMachineGroupRequestTag() - self.tag.append(temp_model.from_map(k)) - return self - - -class CreateResourceGroupMachineGroupResponseBody(TeaModel): - def __init__( - self, - machine_group_id: str = None, - request_id: str = None, - ): - self.machine_group_id = machine_group_id - self.request_id = request_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.machine_group_id is not None: - result['MachineGroupID'] = self.machine_group_id - if self.request_id is not None: - result['RequestId'] = self.request_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.algorithm_spec is not None: + result['AlgorithmSpec'] = self.algorithm_spec.to_map() + if self.algorithm_version is not None: + result['AlgorithmVersion'] = self.algorithm_version + if self.code_dir is not None: + result['CodeDir'] = self.code_dir.to_map() + if self.compute_resource is not None: + result['ComputeResource'] = self.compute_resource.to_map() + if self.environments is not None: + result['Environments'] = self.environments + if self.experiment_config is not None: + result['ExperimentConfig'] = self.experiment_config.to_map() + result['HyperParameters'] = [] + if self.hyper_parameters is not None: + for k in self.hyper_parameters: + result['HyperParameters'].append(k.to_map() if k else None) + result['InputChannels'] = [] + if self.input_channels is not None: + for k in self.input_channels: + result['InputChannels'].append(k.to_map() if k else None) + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + 
result['OutputChannels'] = [] + if self.output_channels is not None: + for k in self.output_channels: + result['OutputChannels'].append(k.to_map() if k else None) + if self.python_requirements is not None: + result['PythonRequirements'] = self.python_requirements + if self.role_arn is not None: + result['RoleArn'] = self.role_arn + if self.scheduler is not None: + result['Scheduler'] = self.scheduler.to_map() + if self.settings is not None: + result['Settings'] = self.settings.to_map() + if self.training_job_description is not None: + result['TrainingJobDescription'] = self.training_job_description + if self.training_job_name is not None: + result['TrainingJobName'] = self.training_job_name + if self.user_vpc is not None: + result['UserVpc'] = self.user_vpc.to_map() + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id + if self.resource_type is not None: + result['ResourceType'] = self.resource_type return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MachineGroupID') is not None: - self.machine_group_id = m.get('MachineGroupID') - if m.get('RequestId') is not None: + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('AlgorithmSpec') is not None: + temp_model = AlgorithmSpec() + self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) + if m.get('AlgorithmVersion') is not None: + self.algorithm_version = m.get('AlgorithmVersion') + if m.get('CodeDir') is not None: + temp_model = Location() + self.code_dir = temp_model.from_map(m['CodeDir']) + if m.get('ComputeResource') is not None: + temp_model = CreateTrainingJobRequestComputeResource() + self.compute_resource = temp_model.from_map(m['ComputeResource']) + if m.get('Environments') is not None: + self.environments = m.get('Environments') + if m.get('ExperimentConfig') is not None: + temp_model = 
CreateTrainingJobRequestExperimentConfig() + self.experiment_config = temp_model.from_map(m['ExperimentConfig']) + self.hyper_parameters = [] + if m.get('HyperParameters') is not None: + for k in m.get('HyperParameters'): + temp_model = CreateTrainingJobRequestHyperParameters() + self.hyper_parameters.append(temp_model.from_map(k)) + self.input_channels = [] + if m.get('InputChannels') is not None: + for k in m.get('InputChannels'): + temp_model = CreateTrainingJobRequestInputChannels() + self.input_channels.append(temp_model.from_map(k)) + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = CreateTrainingJobRequestLabels() + self.labels.append(temp_model.from_map(k)) + self.output_channels = [] + if m.get('OutputChannels') is not None: + for k in m.get('OutputChannels'): + temp_model = CreateTrainingJobRequestOutputChannels() + self.output_channels.append(temp_model.from_map(k)) + if m.get('PythonRequirements') is not None: + self.python_requirements = m.get('PythonRequirements') + if m.get('RoleArn') is not None: + self.role_arn = m.get('RoleArn') + if m.get('Scheduler') is not None: + temp_model = CreateTrainingJobRequestScheduler() + self.scheduler = temp_model.from_map(m['Scheduler']) + if m.get('Settings') is not None: + temp_model = CreateTrainingJobRequestSettings() + self.settings = temp_model.from_map(m['Settings']) + if m.get('TrainingJobDescription') is not None: + self.training_job_description = m.get('TrainingJobDescription') + if m.get('TrainingJobName') is not None: + self.training_job_name = m.get('TrainingJobName') + if m.get('UserVpc') is not None: + temp_model = CreateTrainingJobRequestUserVpc() + self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') + if m.get('ResourceType') is not None: + self.resource_type = m.get('ResourceType') + return self + + +class CreateTrainingJobResponseBody(TeaModel): + def __init__( + self, + 
request_id: str = None, + training_job_id: str = None, + ): + self.request_id = request_id + self.training_job_id = training_job_id + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.training_job_id is not None: + result['TrainingJobId'] = self.training_job_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('TrainingJobId') is not None: + self.training_job_id = m.get('TrainingJobId') return self -class CreateResourceGroupMachineGroupResponse(TeaModel): +class CreateTrainingJobResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateResourceGroupMachineGroupResponseBody = None, + body: CreateTrainingJobResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -5894,46 +6376,17 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateResourceGroupMachineGroupResponseBody() + temp_model = CreateTrainingJobResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateServiceIdentityRoleRequest(TeaModel): - def __init__( - self, - role_name: str = None, - ): - self.role_name = role_name - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.role_name is not None: - result['RoleName'] = self.role_name - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('RoleName') is not None: - self.role_name = m.get('RoleName') - return self - - -class CreateServiceIdentityRoleResponseBody(TeaModel): +class DeleteAlgorithmResponseBody(TeaModel): def __init__( self, request_id: str = 
None, - role_name: str = None, ): self.request_id = request_id - self.role_name = role_name def validate(self): pass @@ -5945,26 +6398,22 @@ def to_map(self): result = dict() if self.request_id is not None: - result['RequestId'] = self.request_id - if self.role_name is not None: - result['RoleName'] = self.role_name + result['requestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('RoleName') is not None: - self.role_name = m.get('RoleName') + if m.get('requestId') is not None: + self.request_id = m.get('requestId') return self -class CreateServiceIdentityRoleResponse(TeaModel): +class DeleteAlgorithmResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateServiceIdentityRoleResponseBody = None, + body: DeleteAlgorithmResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -5995,25 +6444,17 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateServiceIdentityRoleResponseBody() + temp_model = DeleteAlgorithmResponseBody() self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequestComputeResourceInstanceSpec(TeaModel): +class DeleteAlgorithmVersionResponseBody(TeaModel): def __init__( self, - cpu: str = None, - gpu: str = None, - gputype: str = None, - memory: str = None, - shared_memory: str = None, + request_id: str = None, ): - self.cpu = cpu - self.gpu = gpu - self.gputype = gputype - self.memory = memory - self.shared_memory = shared_memory + self.request_id = request_id def validate(self): pass @@ -6024,51 +6465,31 @@ def to_map(self): return _map result = dict() - if self.cpu is not None: - result['CPU'] = self.cpu - if self.gpu is not None: - result['GPU'] = self.gpu - if self.gputype is not None: - result['GPUType'] = 
self.gputype - if self.memory is not None: - result['Memory'] = self.memory - if self.shared_memory is not None: - result['SharedMemory'] = self.shared_memory + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('CPU') is not None: - self.cpu = m.get('CPU') - if m.get('GPU') is not None: - self.gpu = m.get('GPU') - if m.get('GPUType') is not None: - self.gputype = m.get('GPUType') - if m.get('Memory') is not None: - self.memory = m.get('Memory') - if m.get('SharedMemory') is not None: - self.shared_memory = m.get('SharedMemory') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class CreateTrainingJobRequestComputeResource(TeaModel): +class DeleteAlgorithmVersionResponse(TeaModel): def __init__( self, - ecs_count: int = None, - ecs_spec: str = None, - instance_count: int = None, - instance_spec: CreateTrainingJobRequestComputeResourceInstanceSpec = None, - resource_id: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: DeleteAlgorithmVersionResponseBody = None, ): - self.ecs_count = ecs_count - self.ecs_spec = ecs_spec - self.instance_count = instance_count - self.instance_spec = instance_spec - self.resource_id = resource_id + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - if self.instance_spec: - self.instance_spec.validate() + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -6076,40 +6497,32 @@ def to_map(self): return _map result = dict() - if self.ecs_count is not None: - result['EcsCount'] = self.ecs_count - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.instance_count is not None: - result['InstanceCount'] = self.instance_count - if self.instance_spec is not None: - result['InstanceSpec'] = self.instance_spec.to_map() - if self.resource_id is not None: - result['ResourceId'] = 
self.resource_id + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EcsCount') is not None: - self.ecs_count = m.get('EcsCount') - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('InstanceCount') is not None: - self.instance_count = m.get('InstanceCount') - if m.get('InstanceSpec') is not None: - temp_model = CreateTrainingJobRequestComputeResourceInstanceSpec() - self.instance_spec = temp_model.from_map(m['InstanceSpec']) - if m.get('ResourceId') is not None: - self.resource_id = m.get('ResourceId') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = DeleteAlgorithmVersionResponseBody() + self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequestExperimentConfig(TeaModel): +class DeleteComponentResponseBody(TeaModel): def __init__( self, - experiment_id: str = None, + request_id: str = None, ): - self.experiment_id = experiment_id + self.request_id = request_id def validate(self): pass @@ -6120,28 +6533,31 @@ def to_map(self): return _map result = dict() - if self.experiment_id is not None: - result['ExperimentId'] = self.experiment_id + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ExperimentId') is not None: - self.experiment_id = m.get('ExperimentId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class CreateTrainingJobRequestHyperParameters(TeaModel): +class DeleteComponentResponse(TeaModel): def __init__( self, - name: str = None, - value: str = None, + headers: Dict[str, str] = None, 
+ status_code: int = None, + body: DeleteComponentResponseBody = None, ): - self.name = name - self.value = value + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -6149,31 +6565,32 @@ def to_map(self): return _map result = dict() - if self.name is not None: - result['Name'] = self.name - if self.value is not None: - result['Value'] = self.value + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = DeleteComponentResponseBody() + self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequestInputChannels(TeaModel): +class DeleteComponentVersionResponseBody(TeaModel): def __init__( self, - dataset_id: str = None, - input_uri: str = None, - name: str = None, + request_id: str = None, ): - self.dataset_id = dataset_id - self.input_uri = input_uri - self.name = name + self.request_id = request_id def validate(self): pass @@ -6184,36 +6601,31 @@ def to_map(self): return _map result = dict() - if self.dataset_id is not None: - result['DatasetId'] = self.dataset_id - if self.input_uri is not None: - result['InputUri'] = self.input_uri - if self.name is not None: - result['Name'] = self.name + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('DatasetId') is not None: - self.dataset_id = 
m.get('DatasetId') - if m.get('InputUri') is not None: - self.input_uri = m.get('InputUri') - if m.get('Name') is not None: - self.name = m.get('Name') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class CreateTrainingJobRequestLabels(TeaModel): +class DeleteComponentVersionResponse(TeaModel): def __init__( self, - key: str = None, - value: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: DeleteComponentVersionResponseBody = None, ): - self.key = key - self.value = value + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -6221,31 +6633,32 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = DeleteComponentVersionResponseBody() + self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequestOutputChannels(TeaModel): +class DeleteComponentVersionSnapshotResponseBody(TeaModel): def __init__( self, - dataset_id: str = None, - name: str = None, - output_uri: str = None, + request_id: str = None, ): - self.dataset_id = dataset_id - self.name = name - self.output_uri = output_uri + self.request_id = request_id def validate(self): pass @@ -6256,34 
+6669,31 @@ def to_map(self): return _map result = dict() - if self.dataset_id is not None: - result['DatasetId'] = self.dataset_id - if self.name is not None: - result['Name'] = self.name - if self.output_uri is not None: - result['OutputUri'] = self.output_uri + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('DatasetId') is not None: - self.dataset_id = m.get('DatasetId') - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('OutputUri') is not None: - self.output_uri = m.get('OutputUri') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class CreateTrainingJobRequestScheduler(TeaModel): +class DeleteComponentVersionSnapshotResponse(TeaModel): def __init__( self, - max_running_time_in_seconds: int = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: DeleteComponentVersionSnapshotResponseBody = None, ): - self.max_running_time_in_seconds = max_running_time_in_seconds + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -6291,29 +6701,34 @@ def to_map(self): return _map result = dict() - if self.max_running_time_in_seconds is not None: - result['MaxRunningTimeInSeconds'] = self.max_running_time_in_seconds + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MaxRunningTimeInSeconds') is not None: - self.max_running_time_in_seconds = m.get('MaxRunningTimeInSeconds') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if 
m.get('body') is not None: + temp_model = DeleteComponentVersionSnapshotResponseBody() + self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequestSettings(TeaModel): +class DeleteMachineGroupResponseBody(TeaModel): def __init__( self, - aimaster_type: str = None, - enable_error_monitoring_in_aimaster: bool = None, - error_monitoring_args: str = None, - priority: int = None, + machine_group_id: str = None, + request_id: str = None, ): - self.aimaster_type = aimaster_type - self.enable_error_monitoring_in_aimaster = enable_error_monitoring_in_aimaster - self.error_monitoring_args = error_monitoring_args - self.priority = priority + self.machine_group_id = machine_group_id + self.request_id = request_id def validate(self): pass @@ -6324,46 +6739,35 @@ def to_map(self): return _map result = dict() - if self.aimaster_type is not None: - result['AIMasterType'] = self.aimaster_type - if self.enable_error_monitoring_in_aimaster is not None: - result['EnableErrorMonitoringInAIMaster'] = self.enable_error_monitoring_in_aimaster - if self.error_monitoring_args is not None: - result['ErrorMonitoringArgs'] = self.error_monitoring_args - if self.priority is not None: - result['Priority'] = self.priority + if self.machine_group_id is not None: + result['MachineGroupID'] = self.machine_group_id + if self.request_id is not None: + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AIMasterType') is not None: - self.aimaster_type = m.get('AIMasterType') - if m.get('EnableErrorMonitoringInAIMaster') is not None: - self.enable_error_monitoring_in_aimaster = m.get('EnableErrorMonitoringInAIMaster') - if m.get('ErrorMonitoringArgs') is not None: - self.error_monitoring_args = m.get('ErrorMonitoringArgs') - if m.get('Priority') is not None: - self.priority = m.get('Priority') + if m.get('MachineGroupID') is not None: + self.machine_group_id = m.get('MachineGroupID') + if m.get('RequestId') is 
not None: + self.request_id = m.get('RequestId') return self -class CreateTrainingJobRequestUserVpc(TeaModel): +class DeleteMachineGroupResponse(TeaModel): def __init__( self, - default_route: str = None, - extended_cidrs: List[str] = None, - security_group_id: str = None, - switch_id: str = None, - vpc_id: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: DeleteMachineGroupResponseBody = None, ): - self.default_route = default_route - self.extended_cidrs = extended_cidrs - self.security_group_id = security_group_id - self.switch_id = switch_id - self.vpc_id = vpc_id + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -6371,238 +6775,35 @@ def to_map(self): return _map result = dict() - if self.default_route is not None: - result['DefaultRoute'] = self.default_route - if self.extended_cidrs is not None: - result['ExtendedCIDRs'] = self.extended_cidrs - if self.security_group_id is not None: - result['SecurityGroupId'] = self.security_group_id - if self.switch_id is not None: - result['SwitchId'] = self.switch_id - if self.vpc_id is not None: - result['VpcId'] = self.vpc_id + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('DefaultRoute') is not None: - self.default_route = m.get('DefaultRoute') - if m.get('ExtendedCIDRs') is not None: - self.extended_cidrs = m.get('ExtendedCIDRs') - if m.get('SecurityGroupId') is not None: - self.security_group_id = m.get('SecurityGroupId') - if m.get('SwitchId') is not None: - self.switch_id = m.get('SwitchId') - if m.get('VpcId') is not None: - self.vpc_id = m.get('VpcId') + if m.get('headers') is not None: + self.headers = 
m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = DeleteMachineGroupResponseBody() + self.body = temp_model.from_map(m['body']) return self -class CreateTrainingJobRequest(TeaModel): - def __init__( - self, - algorithm_name: str = None, - algorithm_provider: str = None, - algorithm_spec: AlgorithmSpec = None, - algorithm_version: str = None, - code_dir: Location = None, - compute_resource: CreateTrainingJobRequestComputeResource = None, - environments: Dict[str, str] = None, - experiment_config: CreateTrainingJobRequestExperimentConfig = None, - hyper_parameters: List[CreateTrainingJobRequestHyperParameters] = None, - input_channels: List[CreateTrainingJobRequestInputChannels] = None, - labels: List[CreateTrainingJobRequestLabels] = None, - output_channels: List[CreateTrainingJobRequestOutputChannels] = None, - python_requirements: List[str] = None, - role_arn: str = None, - scheduler: CreateTrainingJobRequestScheduler = None, - settings: CreateTrainingJobRequestSettings = None, - training_job_description: str = None, - training_job_name: str = None, - user_vpc: CreateTrainingJobRequestUserVpc = None, - workspace_id: str = None, - ): - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.algorithm_spec = algorithm_spec - self.algorithm_version = algorithm_version - self.code_dir = code_dir - self.compute_resource = compute_resource - self.environments = environments - self.experiment_config = experiment_config - self.hyper_parameters = hyper_parameters - self.input_channels = input_channels - self.labels = labels - self.output_channels = output_channels - self.python_requirements = python_requirements - self.role_arn = role_arn - self.scheduler = scheduler - self.settings = settings - self.training_job_description = training_job_description - self.training_job_name = training_job_name - self.user_vpc = user_vpc - self.workspace_id = 
workspace_id - - def validate(self): - if self.algorithm_spec: - self.algorithm_spec.validate() - if self.code_dir: - self.code_dir.validate() - if self.compute_resource: - self.compute_resource.validate() - if self.experiment_config: - self.experiment_config.validate() - if self.hyper_parameters: - for k in self.hyper_parameters: - if k: - k.validate() - if self.input_channels: - for k in self.input_channels: - if k: - k.validate() - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.output_channels: - for k in self.output_channels: - if k: - k.validate() - if self.scheduler: - self.scheduler.validate() - if self.settings: - self.settings.validate() - if self.user_vpc: - self.user_vpc.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.algorithm_spec is not None: - result['AlgorithmSpec'] = self.algorithm_spec.to_map() - if self.algorithm_version is not None: - result['AlgorithmVersion'] = self.algorithm_version - if self.code_dir is not None: - result['CodeDir'] = self.code_dir.to_map() - if self.compute_resource is not None: - result['ComputeResource'] = self.compute_resource.to_map() - if self.environments is not None: - result['Environments'] = self.environments - if self.experiment_config is not None: - result['ExperimentConfig'] = self.experiment_config.to_map() - result['HyperParameters'] = [] - if self.hyper_parameters is not None: - for k in self.hyper_parameters: - result['HyperParameters'].append(k.to_map() if k else None) - result['InputChannels'] = [] - if self.input_channels is not None: - for k in self.input_channels: - result['InputChannels'].append(k.to_map() if k else None) - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - 
result['Labels'].append(k.to_map() if k else None) - result['OutputChannels'] = [] - if self.output_channels is not None: - for k in self.output_channels: - result['OutputChannels'].append(k.to_map() if k else None) - if self.python_requirements is not None: - result['PythonRequirements'] = self.python_requirements - if self.role_arn is not None: - result['RoleArn'] = self.role_arn - if self.scheduler is not None: - result['Scheduler'] = self.scheduler.to_map() - if self.settings is not None: - result['Settings'] = self.settings.to_map() - if self.training_job_description is not None: - result['TrainingJobDescription'] = self.training_job_description - if self.training_job_name is not None: - result['TrainingJobName'] = self.training_job_name - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('AlgorithmSpec') is not None: - temp_model = AlgorithmSpec() - self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) - if m.get('AlgorithmVersion') is not None: - self.algorithm_version = m.get('AlgorithmVersion') - if m.get('CodeDir') is not None: - temp_model = Location() - self.code_dir = temp_model.from_map(m['CodeDir']) - if m.get('ComputeResource') is not None: - temp_model = CreateTrainingJobRequestComputeResource() - self.compute_resource = temp_model.from_map(m['ComputeResource']) - if m.get('Environments') is not None: - self.environments = m.get('Environments') - if m.get('ExperimentConfig') is not None: - temp_model = CreateTrainingJobRequestExperimentConfig() - self.experiment_config = temp_model.from_map(m['ExperimentConfig']) - self.hyper_parameters = [] - if 
m.get('HyperParameters') is not None: - for k in m.get('HyperParameters'): - temp_model = CreateTrainingJobRequestHyperParameters() - self.hyper_parameters.append(temp_model.from_map(k)) - self.input_channels = [] - if m.get('InputChannels') is not None: - for k in m.get('InputChannels'): - temp_model = CreateTrainingJobRequestInputChannels() - self.input_channels.append(temp_model.from_map(k)) - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = CreateTrainingJobRequestLabels() - self.labels.append(temp_model.from_map(k)) - self.output_channels = [] - if m.get('OutputChannels') is not None: - for k in m.get('OutputChannels'): - temp_model = CreateTrainingJobRequestOutputChannels() - self.output_channels.append(temp_model.from_map(k)) - if m.get('PythonRequirements') is not None: - self.python_requirements = m.get('PythonRequirements') - if m.get('RoleArn') is not None: - self.role_arn = m.get('RoleArn') - if m.get('Scheduler') is not None: - temp_model = CreateTrainingJobRequestScheduler() - self.scheduler = temp_model.from_map(m['Scheduler']) - if m.get('Settings') is not None: - temp_model = CreateTrainingJobRequestSettings() - self.settings = temp_model.from_map(m['Settings']) - if m.get('TrainingJobDescription') is not None: - self.training_job_description = m.get('TrainingJobDescription') - if m.get('TrainingJobName') is not None: - self.training_job_name = m.get('TrainingJobName') - if m.get('UserVpc') is not None: - temp_model = CreateTrainingJobRequestUserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class CreateTrainingJobResponseBody(TeaModel): +class DeleteQuotaResponseBody(TeaModel): def __init__( self, + quota_id: str = None, request_id: str = None, - training_job_id: str = None, ): + # Quota Id + self.quota_id = quota_id self.request_id = request_id - self.training_job_id = training_job_id def 
validate(self): pass @@ -6613,27 +6814,27 @@ def to_map(self): return _map result = dict() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id - if self.training_job_id is not None: - result['TrainingJobId'] = self.training_job_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('TrainingJobId') is not None: - self.training_job_id = m.get('TrainingJobId') return self -class CreateTrainingJobResponse(TeaModel): +class DeleteQuotaResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: CreateTrainingJobResponseBody = None, + body: DeleteQuotaResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -6664,16 +6865,45 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = CreateTrainingJobResponseBody() + temp_model = DeleteQuotaResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteAlgorithmResponseBody(TeaModel): +class DeleteQuotaLabelsRequest(TeaModel): + def __init__( + self, + keys: str = None, + ): + self.keys = keys + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.keys is not None: + result['Keys'] = self.keys + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Keys') is not None: + self.keys = m.get('Keys') + return self + + +class DeleteQuotaLabelsResponseBody(TeaModel): def __init__( self, + quota_id: str = None, request_id: str = None, ): + self.quota_id = quota_id self.request_id = request_id def validate(self): @@ -6685,23 +6915,27 @@ def to_map(self): return _map result = 
dict() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id if self.request_id is not None: - result['requestId'] = self.request_id + result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('requestId') is not None: - self.request_id = m.get('requestId') + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') return self -class DeleteAlgorithmResponse(TeaModel): +class DeleteQuotaLabelsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteAlgorithmResponseBody = None, + body: DeleteQuotaLabelsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -6732,17 +6966,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteAlgorithmResponseBody() + temp_model = DeleteQuotaLabelsResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteAlgorithmVersionResponseBody(TeaModel): +class DeleteResourceGroupResponseBody(TeaModel): def __init__( self, request_id: str = None, + resource_group_id: str = None, ): self.request_id = request_id + self.resource_group_id = resource_group_id def validate(self): pass @@ -6755,21 +6991,25 @@ def to_map(self): result = dict() if self.request_id is not None: result['RequestId'] = self.request_id + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id return result def from_map(self, m: dict = None): m = m or dict() if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') return self -class DeleteAlgorithmVersionResponse(TeaModel): +class DeleteResourceGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = 
None, status_code: int = None, - body: DeleteAlgorithmVersionResponseBody = None, + body: DeleteResourceGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -6800,16 +7040,18 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteAlgorithmVersionResponseBody() + temp_model = DeleteResourceGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteComponentResponseBody(TeaModel): +class DeleteResourceGroupMachineGroupResponseBody(TeaModel): def __init__( self, + machine_group_id: str = None, request_id: str = None, ): + self.machine_group_id = machine_group_id self.request_id = request_id def validate(self): @@ -6821,23 +7063,27 @@ def to_map(self): return _map result = dict() + if self.machine_group_id is not None: + result['MachineGroupID'] = self.machine_group_id if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('MachineGroupID') is not None: + self.machine_group_id = m.get('MachineGroupID') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class DeleteComponentResponse(TeaModel): +class DeleteResourceGroupMachineGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteComponentResponseBody = None, + body: DeleteResourceGroupMachineGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -6868,12 +7114,12 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteComponentResponseBody() + temp_model = DeleteResourceGroupMachineGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteComponentVersionResponseBody(TeaModel): +class 
DeleteTrainingJobResponseBody(TeaModel): def __init__( self, request_id: str = None, @@ -6900,12 +7146,12 @@ def from_map(self, m: dict = None): return self -class DeleteComponentVersionResponse(TeaModel): +class DeleteTrainingJobResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteComponentVersionResponseBody = None, + body: DeleteTrainingJobResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -6936,12 +7182,40 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteComponentVersionResponseBody() + temp_model = DeleteTrainingJobResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteComponentVersionSnapshotResponseBody(TeaModel): +class DeleteTrainingJobLabelsRequest(TeaModel): + def __init__( + self, + keys: str = None, + ): + # This parameter is required. + self.keys = keys + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.keys is not None: + result['Keys'] = self.keys + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Keys') is not None: + self.keys = m.get('Keys') + return self + + +class DeleteTrainingJobLabelsResponseBody(TeaModel): def __init__( self, request_id: str = None, @@ -6968,12 +7242,12 @@ def from_map(self, m: dict = None): return self -class DeleteComponentVersionSnapshotResponse(TeaModel): +class DeleteTrainingJobLabelsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteComponentVersionSnapshotResponseBody = None, + body: DeleteTrainingJobLabelsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7004,16 +7278,22 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = 
m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteComponentVersionSnapshotResponseBody() + temp_model = DeleteTrainingJobLabelsResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteLLMProjectResponseBody(TeaModel): +class GetAI4DDefaultBucketResponseBody(TeaModel): def __init__( self, + extranet_endpoint: str = None, + intranet_endpoint: str = None, + name: str = None, request_id: str = None, ): + self.extranet_endpoint = extranet_endpoint + self.intranet_endpoint = intranet_endpoint + self.name = name self.request_id = request_id def validate(self): @@ -7025,23 +7305,35 @@ def to_map(self): return _map result = dict() + if self.extranet_endpoint is not None: + result['ExtranetEndpoint'] = self.extranet_endpoint + if self.intranet_endpoint is not None: + result['IntranetEndpoint'] = self.intranet_endpoint + if self.name is not None: + result['Name'] = self.name if self.request_id is not None: result['RequestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('ExtranetEndpoint') is not None: + self.extranet_endpoint = m.get('ExtranetEndpoint') + if m.get('IntranetEndpoint') is not None: + self.intranet_endpoint = m.get('IntranetEndpoint') + if m.get('Name') is not None: + self.name = m.get('Name') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') return self -class DeleteLLMProjectResponse(TeaModel): +class GetAI4DDefaultBucketResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteLLMProjectResponseBody = None, + body: GetAI4DDefaultBucketResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7072,19 +7364,37 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteLLMProjectResponseBody() + temp_model = GetAI4DDefaultBucketResponseBody() self.body = 
temp_model.from_map(m['body']) return self -class DeleteMachineGroupResponseBody(TeaModel): +class GetAlgorithmResponseBody(TeaModel): def __init__( self, - machine_group_id: str = None, + algorithm_description: str = None, + algorithm_id: str = None, + algorithm_name: str = None, + algorithm_provider: str = None, + display_name: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, request_id: str = None, + tenant_id: str = None, + user_id: str = None, + workspace_id: str = None, ): - self.machine_group_id = machine_group_id + self.algorithm_description = algorithm_description + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.display_name = display_name + self.gmt_create_time = gmt_create_time + self.gmt_modified_time = gmt_modified_time self.request_id = request_id + self.tenant_id = tenant_id + self.user_id = user_id + self.workspace_id = workspace_id def validate(self): pass @@ -7095,27 +7405,63 @@ def to_map(self): return _map result = dict() - if self.machine_group_id is not None: - result['MachineGroupID'] = self.machine_group_id + if self.algorithm_description is not None: + result['AlgorithmDescription'] = self.algorithm_description + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.display_name is not None: + result['DisplayName'] = self.display_name + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time if self.request_id is not None: result['RequestId'] = self.request_id + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id + if 
self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MachineGroupID') is not None: - self.machine_group_id = m.get('MachineGroupID') + if m.get('AlgorithmDescription') is not None: + self.algorithm_description = m.get('AlgorithmDescription') + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteMachineGroupResponse(TeaModel): +class GetAlgorithmResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteMachineGroupResponseBody = None, + body: GetAlgorithmResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7146,23 +7492,37 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteMachineGroupResponseBody() + temp_model = GetAlgorithmResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteQuotaResponseBody(TeaModel): +class GetAlgorithmVersionResponseBody(TeaModel): def __init__( self, - quota_id: str = None, - request_id: str = None, + algorithm_id: str = None, + 
algorithm_name: str = None, + algorithm_provider: str = None, + algorithm_spec: AlgorithmSpec = None, + algorithm_version: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, + tenant_id: str = None, + user_id: str = None, ): - # Quota Id - self.quota_id = quota_id - self.request_id = request_id + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.algorithm_spec = algorithm_spec + self.algorithm_version = algorithm_version + self.gmt_create_time = gmt_create_time + self.gmt_modified_time = gmt_modified_time + self.tenant_id = tenant_id + self.user_id = user_id def validate(self): - pass + if self.algorithm_spec: + self.algorithm_spec.validate() def to_map(self): _map = super().to_map() @@ -7170,27 +7530,56 @@ def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - if self.request_id is not None: - result['RequestId'] = self.request_id + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.algorithm_spec is not None: + result['AlgorithmSpec'] = self.algorithm_spec.to_map() + if self.algorithm_version is not None: + result['AlgorithmVersion'] = self.algorithm_version + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') + if 
m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('AlgorithmSpec') is not None: + temp_model = AlgorithmSpec() + self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) + if m.get('AlgorithmVersion') is not None: + self.algorithm_version = m.get('AlgorithmVersion') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') return self -class DeleteQuotaResponse(TeaModel): +class GetAlgorithmVersionResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteQuotaResponseBody = None, + body: GetAlgorithmVersionResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7221,17 +7610,24 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteQuotaResponseBody() + temp_model = GetAlgorithmVersionResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteQuotaLabelsRequest(TeaModel): +class GetComponentResponseBodyVersions(TeaModel): def __init__( self, - keys: str = None, + gmt_create_time: str = None, + snapshot_id: str = None, + status: str = None, + version: str = None, ): - self.keys = keys + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + self.snapshot_id = snapshot_id + self.status = status + self.version = version def validate(self): pass @@ -7242,28 +7638,71 @@ def to_map(self): return _map result = dict() - if self.keys is not 
None: - result['Keys'] = self.keys + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id + if self.status is not None: + result['Status'] = self.status + if self.version is not None: + result['Version'] = self.version return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Keys') is not None: - self.keys = m.get('Keys') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('SnapshotId') is not None: + self.snapshot_id = m.get('SnapshotId') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('Version') is not None: + self.version = m.get('Version') return self -class DeleteQuotaLabelsResponseBody(TeaModel): +class GetComponentResponseBody(TeaModel): def __init__( self, - quota_id: str = None, + component_id: str = None, + description: str = None, + display_name: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, + labels: List[Label] = None, + name: str = None, + provider: str = None, request_id: str = None, + tenant_id: str = None, + user_id: str = None, + versions: List[GetComponentResponseBodyVersions] = None, + workspace_id: str = None, ): - self.quota_id = quota_id + self.component_id = component_id + self.description = description + self.display_name = display_name + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_modified_time = gmt_modified_time + self.labels = labels + self.name = name + self.provider = provider self.request_id = request_id + self.tenant_id = tenant_id + self.user_id = user_id + self.versions = versions + self.workspace_id = workspace_id def validate(self): - pass + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.versions: + for k in self.versions: + if k: + k.validate() def to_map(self): _map = 
super().to_map() @@ -7271,27 +7710,81 @@ def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id + if self.component_id is not None: + result['ComponentId'] = self.component_id + if self.description is not None: + result['Description'] = self.description + if self.display_name is not None: + result['DisplayName'] = self.display_name + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.name is not None: + result['Name'] = self.name + if self.provider is not None: + result['Provider'] = self.provider if self.request_id is not None: result['RequestId'] = self.request_id + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id + result['Versions'] = [] + if self.versions is not None: + for k in self.versions: + result['Versions'].append(k.to_map() if k else None) + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') + if m.get('Description') is not None: + self.description = m.get('Description') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('Name') is not 
None: + self.name = m.get('Name') + if m.get('Provider') is not None: + self.provider = m.get('Provider') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + self.versions = [] + if m.get('Versions') is not None: + for k in m.get('Versions'): + temp_model = GetComponentResponseBodyVersions() + self.versions.append(temp_model.from_map(k)) + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteQuotaLabelsResponse(TeaModel): +class GetComponentResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteQuotaLabelsResponseBody = None, + body: GetComponentResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7322,22 +7815,51 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteQuotaLabelsResponseBody() + temp_model = GetComponentResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteResourceGroupResponseBody(TeaModel): +class GetComponentVersionResponseBody(TeaModel): def __init__( self, + description: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, + labels: List[Label] = None, + name: str = None, + provider: str = None, request_id: str = None, - resource_group_id: str = None, + snapshot_id: str = None, + spec: ComponentSpec = None, + tenant_id: str = None, + user_id: str = None, + version: str = None, + workspace_id: str = None, ): + self.description = description + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_modified_time = gmt_modified_time + self.labels = labels + self.name = name + self.provider = provider 
self.request_id = request_id - self.resource_group_id = resource_group_id + self.snapshot_id = snapshot_id + self.spec = spec + self.tenant_id = tenant_id + self.user_id = user_id + self.version = version + self.workspace_id = workspace_id def validate(self): - pass + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.spec: + self.spec.validate() def to_map(self): _map = super().to_map() @@ -7345,27 +7867,77 @@ def to_map(self): return _map result = dict() + if self.description is not None: + result['Description'] = self.description + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.name is not None: + result['Name'] = self.name + if self.provider is not None: + result['Provider'] = self.provider if self.request_id is not None: result['RequestId'] = self.request_id - if self.resource_group_id is not None: - result['ResourceGroupID'] = self.resource_group_id + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id + if self.spec is not None: + result['Spec'] = self.spec.to_map() + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id + if self.version is not None: + result['Version'] = self.version + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('Description') is not None: + self.description = m.get('Description') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + self.labels = [] + if m.get('Labels') is not None: + for k in 
m.get('Labels'): + temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Provider') is not None: + self.provider = m.get('Provider') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('ResourceGroupID') is not None: - self.resource_group_id = m.get('ResourceGroupID') + if m.get('SnapshotId') is not None: + self.snapshot_id = m.get('SnapshotId') + if m.get('Spec') is not None: + temp_model = ComponentSpec() + self.spec = temp_model.from_map(m['Spec']) + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('Version') is not None: + self.version = m.get('Version') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteResourceGroupResponse(TeaModel): +class GetComponentVersionResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteResourceGroupResponseBody = None, + body: GetComponentVersionResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7396,19 +7968,37 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteResourceGroupResponseBody() + temp_model = GetComponentVersionResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteResourceGroupMachineGroupResponseBody(TeaModel): +class GetComponentVersionSnapshotResponseBody(TeaModel): def __init__( self, - machine_group_id: str = None, + component_id: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, + is_current_version: bool = None, request_id: str = None, + snapshot_id: str = None, + tenant_id: str = None, + user_id: str = None, + version: str = None, + workspace_id: str = None, ): - self.machine_group_id = 
machine_group_id + self.component_id = component_id + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_modified_time = gmt_modified_time + self.is_current_version = is_current_version self.request_id = request_id + self.snapshot_id = snapshot_id + self.tenant_id = tenant_id + self.user_id = user_id + self.version = version + self.workspace_id = workspace_id def validate(self): pass @@ -7419,27 +8009,59 @@ def to_map(self): return _map result = dict() - if self.machine_group_id is not None: - result['MachineGroupID'] = self.machine_group_id + if self.component_id is not None: + result['ComponentId'] = self.component_id + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + if self.is_current_version is not None: + result['IsCurrentVersion'] = self.is_current_version if self.request_id is not None: result['RequestId'] = self.request_id - return result + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id + if self.version is not None: + result['Version'] = self.version + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id + return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MachineGroupID') is not None: - self.machine_group_id = m.get('MachineGroupID') + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + if m.get('IsCurrentVersion') is not None: + self.is_current_version = m.get('IsCurrentVersion') if m.get('RequestId') is not 
None: self.request_id = m.get('RequestId') + if m.get('SnapshotId') is not None: + self.snapshot_id = m.get('SnapshotId') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('Version') is not None: + self.version = m.get('Version') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteResourceGroupMachineGroupResponse(TeaModel): +class GetComponentVersionSnapshotResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteResourceGroupMachineGroupResponseBody = None, + body: GetComponentVersionSnapshotResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7470,17 +8092,36 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteResourceGroupMachineGroupResponseBody() + temp_model = GetComponentVersionSnapshotResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteTrainingJobResponseBody(TeaModel): +class GetInstanceJobResponseBody(TeaModel): def __init__( self, + creator: str = None, + gmt_create_time: str = None, + instance_id: str = None, + instance_job_id: str = None, + instance_job_type: str = None, + reason_code: str = None, + reason_message: str = None, request_id: str = None, + status: str = None, + workspace_id: str = None, ): + self.creator = creator + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + self.instance_id = instance_id + self.instance_job_id = instance_job_id + self.instance_job_type = instance_job_type + self.reason_code = reason_code + self.reason_message = reason_message self.request_id = request_id + self.status = status + self.workspace_id = workspace_id def validate(self): pass @@ -7491,23 +8132,59 @@ def to_map(self): return _map result = 
dict() + if self.creator is not None: + result['Creator'] = self.creator + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.instance_id is not None: + result['InstanceId'] = self.instance_id + if self.instance_job_id is not None: + result['InstanceJobId'] = self.instance_job_id + if self.instance_job_type is not None: + result['InstanceJobType'] = self.instance_job_type + if self.reason_code is not None: + result['ReasonCode'] = self.reason_code + if self.reason_message is not None: + result['ReasonMessage'] = self.reason_message if self.request_id is not None: result['RequestId'] = self.request_id + if self.status is not None: + result['Status'] = self.status + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('Creator') is not None: + self.creator = m.get('Creator') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('InstanceId') is not None: + self.instance_id = m.get('InstanceId') + if m.get('InstanceJobId') is not None: + self.instance_job_id = m.get('InstanceJobId') + if m.get('InstanceJobType') is not None: + self.instance_job_type = m.get('InstanceJobType') + if m.get('ReasonCode') is not None: + self.reason_code = m.get('ReasonCode') + if m.get('ReasonMessage') is not None: + self.reason_message = m.get('ReasonMessage') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteTrainingJobResponse(TeaModel): +class GetInstanceJobResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteTrainingJobResponseBody = None, + body: GetInstanceJobResponseBody = None, ): self.headers = headers self.status_code = status_code @@ 
-7538,17 +8215,29 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteTrainingJobResponseBody() + temp_model = GetInstanceJobResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeleteTrainingJobLabelsRequest(TeaModel): +class GetJobViewMetricsRequest(TeaModel): def __init__( self, - keys: str = None, + end_time: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, + start_time: str = None, + time_step: str = None, + workspace_id: str = None, ): - self.keys = keys + self.end_time = end_time + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by + self.start_time = start_time + self.time_step = time_step + self.workspace_id = workspace_id def validate(self): pass @@ -7559,26 +8248,61 @@ def to_map(self): return _map result = dict() - if self.keys is not None: - result['Keys'] = self.keys + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Keys') is not None: - self.keys = m.get('Keys') + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + 
if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeleteTrainingJobLabelsResponseBody(TeaModel): +class GetJobViewMetricsResponseBody(TeaModel): def __init__( self, + job_metrics: List[JobViewMetric] = None, request_id: str = None, + summary: JobViewMetric = None, + total: int = None, ): + self.job_metrics = job_metrics self.request_id = request_id + self.summary = summary + self.total = total def validate(self): - pass + if self.job_metrics: + for k in self.job_metrics: + if k: + k.validate() + if self.summary: + self.summary.validate() def to_map(self): _map = super().to_map() @@ -7586,23 +8310,41 @@ def to_map(self): return _map result = dict() + result['JobMetrics'] = [] + if self.job_metrics is not None: + for k in self.job_metrics: + result['JobMetrics'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id + if self.summary is not None: + result['Summary'] = self.summary.to_map() + if self.total is not None: + result['Total'] = self.total return result def from_map(self, m: dict = None): m = m or dict() + self.job_metrics = [] + if m.get('JobMetrics') is not None: + for k in m.get('JobMetrics'): + temp_model = JobViewMetric() + self.job_metrics.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('Summary') is not None: + temp_model = JobViewMetric() + self.summary = temp_model.from_map(m['Summary']) + if m.get('Total') is not None: + self.total = m.get('Total') return self -class DeleteTrainingJobLabelsResponse(TeaModel): +class GetJobViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeleteTrainingJobLabelsResponseBody = None, + body: GetJobViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -7633,21 +8375,21 @@ def 
from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeleteTrainingJobLabelsResponseBody() + temp_model = GetJobViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class DeployLLMSnapshotRequestWorkloadContainer(TeaModel): +class GetJobsStatisticsByQuotaRequest(TeaModel): def __init__( self, - image: str = None, - port: int = None, - user_command: str = None, + end_time: str = None, + start_time: str = None, + workspace_id: str = None, ): - self.image = image - self.port = port - self.user_command = user_command + self.end_time = end_time + self.start_time = start_time + self.workspace_id = workspace_id def validate(self): pass @@ -7658,35 +8400,35 @@ def to_map(self): return _map result = dict() - if self.image is not None: - result['Image'] = self.image - if self.port is not None: - result['Port'] = self.port - if self.user_command is not None: - result['UserCommand'] = self.user_command + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Image') is not None: - self.image = m.get('Image') - if m.get('Port') is not None: - self.port = m.get('Port') - if m.get('UserCommand') is not None: - self.user_command = m.get('UserCommand') + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class DeployLLMSnapshotRequestWorkloadExtraConfig(TeaModel): +class GetJobsStatisticsByQuotaResponseBody(TeaModel): def __init__( self, - enable_webservice: bool = None, - job_max_running_time_minutes: int = None, - 
third_party_lib_dir: str = None, + quota_id: str = None, + request_id: str = None, + statistics: Dict[str, Any] = None, ): - self.enable_webservice = enable_webservice - self.job_max_running_time_minutes = job_max_running_time_minutes - self.third_party_lib_dir = third_party_lib_dir + self.quota_id = quota_id + self.request_id = request_id + self.statistics = statistics def validate(self): pass @@ -7697,40 +8439,39 @@ def to_map(self): return _map result = dict() - if self.enable_webservice is not None: - result['EnableWebservice'] = self.enable_webservice - if self.job_max_running_time_minutes is not None: - result['JobMaxRunningTimeMinutes'] = self.job_max_running_time_minutes - if self.third_party_lib_dir is not None: - result['ThirdPartyLibDir'] = self.third_party_lib_dir + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.statistics is not None: + result['Statistics'] = self.statistics return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EnableWebservice') is not None: - self.enable_webservice = m.get('EnableWebservice') - if m.get('JobMaxRunningTimeMinutes') is not None: - self.job_max_running_time_minutes = m.get('JobMaxRunningTimeMinutes') - if m.get('ThirdPartyLibDir') is not None: - self.third_party_lib_dir = m.get('ThirdPartyLibDir') + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('Statistics') is not None: + self.statistics = m.get('Statistics') return self -class DeployLLMSnapshotRequestWorkloadResourceSpecResourceConfig(TeaModel): +class GetJobsStatisticsByQuotaResponse(TeaModel): def __init__( self, - cpu: int = None, - gpu: int = None, - memory_in_gi_b: int = None, - resource_group: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: GetJobsStatisticsByQuotaResponseBody = None, ): - self.cpu 
= cpu - self.gpu = gpu - self.memory_in_gi_b = memory_in_gi_b - self.resource_group = resource_group + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -7738,84 +8479,36 @@ def to_map(self): return _map result = dict() - if self.cpu is not None: - result['Cpu'] = self.cpu - if self.gpu is not None: - result['Gpu'] = self.gpu - if self.memory_in_gi_b is not None: - result['MemoryInGiB'] = self.memory_in_gi_b - if self.resource_group is not None: - result['ResourceGroup'] = self.resource_group + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Cpu') is not None: - self.cpu = m.get('Cpu') - if m.get('Gpu') is not None: - self.gpu = m.get('Gpu') - if m.get('MemoryInGiB') is not None: - self.memory_in_gi_b = m.get('MemoryInGiB') - if m.get('ResourceGroup') is not None: - self.resource_group = m.get('ResourceGroup') - return self - - -class DeployLLMSnapshotRequestWorkloadResourceSpec(TeaModel): - def __init__( - self, - ecs_spec: str = None, - instance_num: int = None, - resource_config: DeployLLMSnapshotRequestWorkloadResourceSpecResourceConfig = None, - ): - self.ecs_spec = ecs_spec - self.instance_num = instance_num - self.resource_config = resource_config - - def validate(self): - if self.resource_config: - self.resource_config.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.instance_num is not None: - result['InstanceNum'] = self.instance_num - if self.resource_config is not None: - result['ResourceConfig'] = self.resource_config.to_map() - return result - 
- def from_map(self, m: dict = None): - m = m or dict() - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('InstanceNum') is not None: - self.instance_num = m.get('InstanceNum') - if m.get('ResourceConfig') is not None: - temp_model = DeployLLMSnapshotRequestWorkloadResourceSpecResourceConfig() - self.resource_config = temp_model.from_map(m['ResourceConfig']) + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = GetJobsStatisticsByQuotaResponseBody() + self.body = temp_model.from_map(m['body']) return self -class DeployLLMSnapshotRequestWorkloadUserVpc(TeaModel): +class GetJobsStatisticsByResourceGroupRequest(TeaModel): def __init__( self, - default_route: str = None, - extended_cidrs: List[str] = None, - security_group_id: str = None, - switch_id: str = None, - vpc_id: str = None, + end_time: str = None, + start_time: str = None, + workspace_id: str = None, ): - self.default_route = default_route - self.extended_cidrs = extended_cidrs - self.security_group_id = security_group_id - self.switch_id = switch_id - self.vpc_id = vpc_id + self.end_time = end_time + self.start_time = start_time + self.workspace_id = workspace_id def validate(self): pass @@ -7826,55 +8519,36 @@ def to_map(self): return _map result = dict() - if self.default_route is not None: - result['DefaultRoute'] = self.default_route - if self.extended_cidrs is not None: - result['ExtendedCIDRs'] = self.extended_cidrs - if self.security_group_id is not None: - result['SecurityGroupId'] = self.security_group_id - if self.switch_id is not None: - result['SwitchId'] = self.switch_id - if self.vpc_id is not None: - result['VpcId'] = self.vpc_id + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.workspace_id is not None: + 
result['WorkspaceID'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('DefaultRoute') is not None: - self.default_route = m.get('DefaultRoute') - if m.get('ExtendedCIDRs') is not None: - self.extended_cidrs = m.get('ExtendedCIDRs') - if m.get('SecurityGroupId') is not None: - self.security_group_id = m.get('SecurityGroupId') - if m.get('SwitchId') is not None: - self.switch_id = m.get('SwitchId') - if m.get('VpcId') is not None: - self.vpc_id = m.get('VpcId') + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('WorkspaceID') is not None: + self.workspace_id = m.get('WorkspaceID') return self -class DeployLLMSnapshotRequestWorkload(TeaModel): +class GetJobsStatisticsByResourceGroupResponseBody(TeaModel): def __init__( self, - container: DeployLLMSnapshotRequestWorkloadContainer = None, - extra_config: DeployLLMSnapshotRequestWorkloadExtraConfig = None, - resource_spec: DeployLLMSnapshotRequestWorkloadResourceSpec = None, - user_vpc: DeployLLMSnapshotRequestWorkloadUserVpc = None, + request_id: str = None, + statistics: Dict[str, Any] = None, ): - self.container = container - self.extra_config = extra_config - self.resource_spec = resource_spec - self.user_vpc = user_vpc + self.request_id = request_id + self.statistics = statistics def validate(self): - if self.container: - self.container.validate() - if self.extra_config: - self.extra_config.validate() - if self.resource_spec: - self.resource_spec.validate() - if self.user_vpc: - self.user_vpc.validate() + pass def to_map(self): _map = super().to_map() @@ -7882,49 +8556,35 @@ def to_map(self): return _map result = dict() - if self.container is not None: - result['Container'] = self.container.to_map() - if self.extra_config is not None: - result['ExtraConfig'] = self.extra_config.to_map() - if self.resource_spec is not None: - result['ResourceSpec'] = 
self.resource_spec.to_map() - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.statistics is not None: + result['Statistics'] = self.statistics return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Container') is not None: - temp_model = DeployLLMSnapshotRequestWorkloadContainer() - self.container = temp_model.from_map(m['Container']) - if m.get('ExtraConfig') is not None: - temp_model = DeployLLMSnapshotRequestWorkloadExtraConfig() - self.extra_config = temp_model.from_map(m['ExtraConfig']) - if m.get('ResourceSpec') is not None: - temp_model = DeployLLMSnapshotRequestWorkloadResourceSpec() - self.resource_spec = temp_model.from_map(m['ResourceSpec']) - if m.get('UserVpc') is not None: - temp_model = DeployLLMSnapshotRequestWorkloadUserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('Statistics') is not None: + self.statistics = m.get('Statistics') return self -class DeployLLMSnapshotRequest(TeaModel): +class GetJobsStatisticsByResourceGroupResponse(TeaModel): def __init__( self, - description: str = None, - display_name: str = None, - labels: Dict[str, Any] = None, - workload: DeployLLMSnapshotRequestWorkload = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: GetJobsStatisticsByResourceGroupResponseBody = None, ): - self.description = description - self.display_name = display_name - self.labels = labels - self.workload = workload + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - if self.workload: - self.workload.validate() + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -7932,44 +8592,66 @@ def to_map(self): return _map result = dict() - if self.description is not None: - result['Description'] = self.description - if 
self.display_name is not None: - result['DisplayName'] = self.display_name - if self.labels is not None: - result['Labels'] = self.labels - if self.workload is not None: - result['Workload'] = self.workload.to_map() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Description') is not None: - self.description = m.get('Description') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('Labels') is not None: - self.labels = m.get('Labels') - if m.get('Workload') is not None: - temp_model = DeployLLMSnapshotRequestWorkload() - self.workload = temp_model.from_map(m['Workload']) + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = GetJobsStatisticsByResourceGroupResponseBody() + self.body = temp_model.from_map(m['body']) return self -class DeployLLMSnapshotResponseBody(TeaModel): +class GetMachineGroupResponseBody(TeaModel): def __init__( self, - job_id: str = None, - job_name: str = None, - job_request_id: str = None, + count: int = None, + default_driver: str = None, + duration: str = None, + ecs_type: str = None, + gmt_created: str = None, + gmt_expired: str = None, + gmt_modified: str = None, + gmt_started: str = None, + machine_group_id: str = None, + order_id: str = None, + order_instance_id: str = None, + pairesource_id: str = None, + pay_type: str = None, + pricing_cycle: str = None, + region_id: str = None, request_id: str = None, status: str = None, + supported_drivers: List[str] = None, ): - self.job_id = job_id - self.job_name = job_name - self.job_request_id = job_request_id + self.count = count + self.default_driver = default_driver + 
self.duration = duration + self.ecs_type = ecs_type + self.gmt_created = gmt_created + self.gmt_expired = gmt_expired + self.gmt_modified = gmt_modified + self.gmt_started = gmt_started + self.machine_group_id = machine_group_id + self.order_id = order_id + self.order_instance_id = order_instance_id + self.pairesource_id = pairesource_id + self.pay_type = pay_type + self.pricing_cycle = pricing_cycle + self.region_id = region_id self.request_id = request_id self.status = status + self.supported_drivers = supported_drivers def validate(self): pass @@ -7980,39 +8662,91 @@ def to_map(self): return _map result = dict() - if self.job_id is not None: - result['JobId'] = self.job_id - if self.job_name is not None: - result['JobName'] = self.job_name - if self.job_request_id is not None: - result['JobRequestId'] = self.job_request_id + if self.count is not None: + result['Count'] = self.count + if self.default_driver is not None: + result['DefaultDriver'] = self.default_driver + if self.duration is not None: + result['Duration'] = self.duration + if self.ecs_type is not None: + result['EcsType'] = self.ecs_type + if self.gmt_created is not None: + result['GmtCreated'] = self.gmt_created + if self.gmt_expired is not None: + result['GmtExpired'] = self.gmt_expired + if self.gmt_modified is not None: + result['GmtModified'] = self.gmt_modified + if self.gmt_started is not None: + result['GmtStarted'] = self.gmt_started + if self.machine_group_id is not None: + result['MachineGroupID'] = self.machine_group_id + if self.order_id is not None: + result['OrderID'] = self.order_id + if self.order_instance_id is not None: + result['OrderInstanceId'] = self.order_instance_id + if self.pairesource_id is not None: + result['PAIResourceID'] = self.pairesource_id + if self.pay_type is not None: + result['PayType'] = self.pay_type + if self.pricing_cycle is not None: + result['PricingCycle'] = self.pricing_cycle + if self.region_id is not None: + result['RegionID'] = self.region_id if 
self.request_id is not None: result['RequestId'] = self.request_id if self.status is not None: result['Status'] = self.status + if self.supported_drivers is not None: + result['SupportedDrivers'] = self.supported_drivers return result def from_map(self, m: dict = None): m = m or dict() - if m.get('JobId') is not None: - self.job_id = m.get('JobId') - if m.get('JobName') is not None: - self.job_name = m.get('JobName') - if m.get('JobRequestId') is not None: - self.job_request_id = m.get('JobRequestId') + if m.get('Count') is not None: + self.count = m.get('Count') + if m.get('DefaultDriver') is not None: + self.default_driver = m.get('DefaultDriver') + if m.get('Duration') is not None: + self.duration = m.get('Duration') + if m.get('EcsType') is not None: + self.ecs_type = m.get('EcsType') + if m.get('GmtCreated') is not None: + self.gmt_created = m.get('GmtCreated') + if m.get('GmtExpired') is not None: + self.gmt_expired = m.get('GmtExpired') + if m.get('GmtModified') is not None: + self.gmt_modified = m.get('GmtModified') + if m.get('GmtStarted') is not None: + self.gmt_started = m.get('GmtStarted') + if m.get('MachineGroupID') is not None: + self.machine_group_id = m.get('MachineGroupID') + if m.get('OrderID') is not None: + self.order_id = m.get('OrderID') + if m.get('OrderInstanceId') is not None: + self.order_instance_id = m.get('OrderInstanceId') + if m.get('PAIResourceID') is not None: + self.pairesource_id = m.get('PAIResourceID') + if m.get('PayType') is not None: + self.pay_type = m.get('PayType') + if m.get('PricingCycle') is not None: + self.pricing_cycle = m.get('PricingCycle') + if m.get('RegionID') is not None: + self.region_id = m.get('RegionID') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') if m.get('Status') is not None: self.status = m.get('Status') + if m.get('SupportedDrivers') is not None: + self.supported_drivers = m.get('SupportedDrivers') return self -class DeployLLMSnapshotResponse(TeaModel): +class 
GetMachineGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: DeployLLMSnapshotResponseBody = None, + body: GetMachineGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8043,23 +8777,106 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = DeployLLMSnapshotResponseBody() + temp_model = GetMachineGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetAI4DDefaultBucketResponseBody(TeaModel): +class GetMetricsRequest(TeaModel): def __init__( self, - extranet_endpoint: str = None, - intranet_endpoint: str = None, - name: str = None, - request_id: str = None, + dimensions: str = None, + end_time: str = None, + express: str = None, + length: str = None, + metric_name: str = None, + namespace: str = None, + next_token: str = None, + period: str = None, + start_time: str = None, ): - self.extranet_endpoint = extranet_endpoint - self.intranet_endpoint = intranet_endpoint - self.name = name + # This parameter is required. + self.dimensions = dimensions + self.end_time = end_time + self.express = express + self.length = length + # This parameter is required. 
+ self.metric_name = metric_name + self.namespace = namespace + self.next_token = next_token + self.period = period + self.start_time = start_time + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.dimensions is not None: + result['Dimensions'] = self.dimensions + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.express is not None: + result['Express'] = self.express + if self.length is not None: + result['Length'] = self.length + if self.metric_name is not None: + result['MetricName'] = self.metric_name + if self.namespace is not None: + result['Namespace'] = self.namespace + if self.next_token is not None: + result['NextToken'] = self.next_token + if self.period is not None: + result['Period'] = self.period + if self.start_time is not None: + result['StartTime'] = self.start_time + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Dimensions') is not None: + self.dimensions = m.get('Dimensions') + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('Express') is not None: + self.express = m.get('Express') + if m.get('Length') is not None: + self.length = m.get('Length') + if m.get('MetricName') is not None: + self.metric_name = m.get('MetricName') + if m.get('Namespace') is not None: + self.namespace = m.get('Namespace') + if m.get('NextToken') is not None: + self.next_token = m.get('NextToken') + if m.get('Period') is not None: + self.period = m.get('Period') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + return self + + +class GetMetricsResponseBody(TeaModel): + def __init__( + self, + code: str = None, + datapoints: str = None, + message: str = None, + next_token: str = None, + period: str = None, + request_id: str = None, + success: bool = None, + ): + self.code = code + self.datapoints = datapoints + self.message = message + self.next_token = 
next_token + self.period = period self.request_id = request_id + self.success = success def validate(self): pass @@ -8070,35 +8887,47 @@ def to_map(self): return _map result = dict() - if self.extranet_endpoint is not None: - result['ExtranetEndpoint'] = self.extranet_endpoint - if self.intranet_endpoint is not None: - result['IntranetEndpoint'] = self.intranet_endpoint - if self.name is not None: - result['Name'] = self.name + if self.code is not None: + result['Code'] = self.code + if self.datapoints is not None: + result['Datapoints'] = self.datapoints + if self.message is not None: + result['Message'] = self.message + if self.next_token is not None: + result['NextToken'] = self.next_token + if self.period is not None: + result['Period'] = self.period if self.request_id is not None: result['RequestId'] = self.request_id + if self.success is not None: + result['Success'] = self.success return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ExtranetEndpoint') is not None: - self.extranet_endpoint = m.get('ExtranetEndpoint') - if m.get('IntranetEndpoint') is not None: - self.intranet_endpoint = m.get('IntranetEndpoint') - if m.get('Name') is not None: - self.name = m.get('Name') + if m.get('Code') is not None: + self.code = m.get('Code') + if m.get('Datapoints') is not None: + self.datapoints = m.get('Datapoints') + if m.get('Message') is not None: + self.message = m.get('Message') + if m.get('NextToken') is not None: + self.next_token = m.get('NextToken') + if m.get('Period') is not None: + self.period = m.get('Period') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('Success') is not None: + self.success = m.get('Success') return self -class GetAI4DDefaultBucketResponse(TeaModel): +class GetMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetAI4DDefaultBucketResponseBody = None, + body: GetMetricsResponseBody = None, ): self.headers = headers 
self.status_code = status_code @@ -8129,40 +8958,72 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetAI4DDefaultBucketResponseBody() + temp_model = GetMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetAlgorithmResponseBody(TeaModel): +class GetNodeGPUMetricsRequest(TeaModel): def __init__( self, - algorithm_description: str = None, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - display_name: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, + end_time: str = None, + metric_type: str = None, + quota_id: str = None, + start_time: str = None, + ): + self.end_time = end_time + # This parameter is required. + self.metric_type = metric_type + # This parameter is required. + self.quota_id = quota_id + self.start_time = start_time + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.metric_type is not None: + result['MetricType'] = self.metric_type + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.start_time is not None: + result['StartTime'] = self.start_time + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + return self + + +class GetNodeGPUMetricsResponseBody(TeaModel): + def __init__( + self, + metric_type: str = None, + node_gpumetric: NodeGPUMetric = None, request_id: str = None, - tenant_id: str = None, - user_id: str = None, - workspace_id: str = 
None, ): - self.algorithm_description = algorithm_description - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.display_name = display_name - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time + self.metric_type = metric_type + self.node_gpumetric = node_gpumetric self.request_id = request_id - self.tenant_id = tenant_id - self.user_id = user_id - self.workspace_id = workspace_id def validate(self): - pass + if self.node_gpumetric: + self.node_gpumetric.validate() def to_map(self): _map = super().to_map() @@ -8170,63 +9031,32 @@ def to_map(self): return _map result = dict() - if self.algorithm_description is not None: - result['AlgorithmDescription'] = self.algorithm_description - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time + if self.metric_type is not None: + result['MetricType'] = self.metric_type + if self.node_gpumetric is not None: + result['NodeGPUMetric'] = self.node_gpumetric.to_map() if self.request_id is not None: - result['RequestId'] = self.request_id - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + result['requestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmDescription') is not None: - self.algorithm_description = m.get('AlgorithmDescription') 
- if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + if m.get('NodeGPUMetric') is not None: + temp_model = NodeGPUMetric() + self.node_gpumetric = temp_model.from_map(m['NodeGPUMetric']) + if m.get('requestId') is not None: + self.request_id = m.get('requestId') return self -class GetAlgorithmResponse(TeaModel): +class GetNodeGPUMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetAlgorithmResponseBody = None, + body: GetNodeGPUMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8257,37 +9087,28 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetAlgorithmResponseBody() + temp_model = GetNodeGPUMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetAlgorithmVersionResponseBody(TeaModel): +class GetNodeMetricsRequest(TeaModel): def __init__( self, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - algorithm_spec: AlgorithmSpec = None, - 
algorithm_version: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - tenant_id: str = None, - user_id: str = None, + end_time: str = None, + gputype: str = None, + start_time: str = None, + time_step: str = None, + verbose: bool = None, ): - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.algorithm_spec = algorithm_spec - self.algorithm_version = algorithm_version - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.tenant_id = tenant_id - self.user_id = user_id + self.end_time = end_time + self.gputype = gputype + self.start_time = start_time + self.time_step = time_step + self.verbose = verbose def validate(self): - if self.algorithm_spec: - self.algorithm_spec.validate() + pass def to_map(self): _map = super().to_map() @@ -8295,56 +9116,86 @@ def to_map(self): return _map result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.algorithm_spec is not None: - result['AlgorithmSpec'] = self.algorithm_spec.to_map() - if self.algorithm_version is not None: - result['AlgorithmVersion'] = self.algorithm_version - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is 
not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('AlgorithmSpec') is not None: - temp_model = AlgorithmSpec() - self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) - if m.get('AlgorithmVersion') is not None: - self.algorithm_version = m.get('AlgorithmVersion') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.verbose is not None: + result['Verbose'] = self.verbose + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('Verbose') is not None: + self.verbose = m.get('Verbose') return self -class GetAlgorithmVersionResponse(TeaModel): +class GetNodeMetricsResponseBody(TeaModel): + def __init__( + self, + metric_type: str = None, + nodes_metrics: List[NodeMetric] = None, + resource_group_id: str = None, + ): + self.metric_type = metric_type + self.nodes_metrics = nodes_metrics + self.resource_group_id = resource_group_id + + def validate(self): + if self.nodes_metrics: + for k in self.nodes_metrics: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.metric_type is not None: + 
result['MetricType'] = self.metric_type + result['NodesMetrics'] = [] + if self.nodes_metrics is not None: + for k in self.nodes_metrics: + result['NodesMetrics'].append(k.to_map() if k else None) + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + self.nodes_metrics = [] + if m.get('NodesMetrics') is not None: + for k in m.get('NodesMetrics'): + temp_model = NodeMetric() + self.nodes_metrics.append(temp_model.from_map(k)) + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') + return self + + +class GetNodeMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetAlgorithmVersionResponseBody = None, + body: GetNodeMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8375,23 +9226,27 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetAlgorithmVersionResponseBody() + temp_model = GetNodeMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetComponentResponseBodyVersions(TeaModel): +class GetNodeViewMetricsRequest(TeaModel): def __init__( self, - gmt_create_time: str = None, - snapshot_id: str = None, - status: str = None, - version: str = None, + node_id: str = None, + page_number: int = None, + page_size: int = None, + time_step: str = None, + workspace_id: str = None, ): - self.gmt_create_time = gmt_create_time - self.snapshot_id = snapshot_id - self.status = status - self.version = version + self.node_id = node_id + # This parameter is required. + self.page_number = page_number + # This parameter is required. 
+ self.page_size = page_size + self.time_step = time_step + self.workspace_id = workspace_id def validate(self): pass @@ -8402,67 +9257,45 @@ def to_map(self): return _map result = dict() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.status is not None: - result['Status'] = self.status - if self.version is not None: - result['Version'] = self.version + if self.node_id is not None: + result['NodeId'] = self.node_id + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('Status') is not None: - self.status = m.get('Status') - if m.get('Version') is not None: - self.version = m.get('Version') + if m.get('NodeId') is not None: + self.node_id = m.get('NodeId') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class GetComponentResponseBody(TeaModel): +class GetNodeViewMetricsResponseBody(TeaModel): def __init__( self, - component_id: str = None, - description: str = None, - display_name: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - labels: List[Label] = None, - name: str = None, - provider: str = None, - request_id: str = None, - tenant_id: str = None, - user_id: str 
= None, - versions: List[GetComponentResponseBodyVersions] = None, - workspace_id: str = None, + node_metrics: List[NodeViewMetric] = None, + total: int = None, ): - self.component_id = component_id - self.description = description - self.display_name = display_name - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.name = name - self.provider = provider - self.request_id = request_id - self.tenant_id = tenant_id - self.user_id = user_id - self.versions = versions - self.workspace_id = workspace_id + self.node_metrics = node_metrics + self.total = total def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.versions: - for k in self.versions: + if self.node_metrics: + for k in self.node_metrics: if k: k.validate() @@ -8472,81 +9305,32 @@ def to_map(self): return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id - if self.description is not None: - result['Description'] = self.description - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.name is not None: - result['Name'] = self.name - if self.provider is not None: - result['Provider'] = self.provider - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id - result['Versions'] = [] - if self.versions is not None: - for k in self.versions: - result['Versions'].append(k.to_map() if k else None) - if self.workspace_id is not None: - result['WorkspaceId'] = 
self.workspace_id + result['NodeMetrics'] = [] + if self.node_metrics is not None: + for k in self.node_metrics: + result['NodeMetrics'].append(k.to_map() if k else None) + if self.total is not None: + result['Total'] = self.total return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') - if m.get('Description') is not None: - self.description = m.get('Description') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Provider') is not None: - self.provider = m.get('Provider') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - self.versions = [] - if m.get('Versions') is not None: - for k in m.get('Versions'): - temp_model = GetComponentResponseBodyVersions() - self.versions.append(temp_model.from_map(k)) - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self + self.node_metrics = [] + if m.get('NodeMetrics') is not None: + for k in m.get('NodeMetrics'): + temp_model = NodeViewMetric() + self.node_metrics.append(temp_model.from_map(k)) + if m.get('Total') is not None: + self.total = m.get('Total') + return self -class GetComponentResponse(TeaModel): +class GetNodeViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetComponentResponseBody = None, + body: 
GetNodeViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8577,49 +9361,48 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetComponentResponseBody() + temp_model = GetNodeViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetComponentVersionResponseBody(TeaModel): +class GetOperationResponseBody(TeaModel): def __init__( self, - description: str = None, - gmt_create_time: str = None, + creator_id: str = None, + gmt_created_time: str = None, + gmt_end_time: str = None, gmt_modified_time: str = None, - labels: List[Label] = None, - name: str = None, - provider: str = None, + gmt_start_time: str = None, + object_id: str = None, + object_type: str = None, + operation_description: str = None, + operation_id: str = None, + operation_spec_json: str = None, + operation_type: str = None, + reason_code: str = None, + reason_message: str = None, request_id: str = None, - snapshot_id: str = None, - spec: ComponentSpec = None, - tenant_id: str = None, - user_id: str = None, - version: str = None, - workspace_id: str = None, + status: str = None, ): - self.description = description - self.gmt_create_time = gmt_create_time + self.creator_id = creator_id + self.gmt_created_time = gmt_created_time + self.gmt_end_time = gmt_end_time self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.name = name - self.provider = provider + self.gmt_start_time = gmt_start_time + self.object_id = object_id + self.object_type = object_type + self.operation_description = operation_description + self.operation_id = operation_id + self.operation_spec_json = operation_spec_json + self.operation_type = operation_type + self.reason_code = reason_code + self.reason_message = reason_message self.request_id = request_id - self.snapshot_id = snapshot_id - self.spec = spec - self.tenant_id = tenant_id - 
self.user_id = user_id - self.version = version - self.workspace_id = workspace_id + self.status = status def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.spec: - self.spec.validate() + pass def to_map(self): _map = super().to_map() @@ -8627,77 +9410,79 @@ def to_map(self): return _map result = dict() - if self.description is not None: - result['Description'] = self.description - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time + if self.creator_id is not None: + result['CreatorId'] = self.creator_id + if self.gmt_created_time is not None: + result['GmtCreatedTime'] = self.gmt_created_time + if self.gmt_end_time is not None: + result['GmtEndTime'] = self.gmt_end_time if self.gmt_modified_time is not None: result['GmtModifiedTime'] = self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.name is not None: - result['Name'] = self.name - if self.provider is not None: - result['Provider'] = self.provider + if self.gmt_start_time is not None: + result['GmtStartTime'] = self.gmt_start_time + if self.object_id is not None: + result['ObjectId'] = self.object_id + if self.object_type is not None: + result['ObjectType'] = self.object_type + if self.operation_description is not None: + result['OperationDescription'] = self.operation_description + if self.operation_id is not None: + result['OperationId'] = self.operation_id + if self.operation_spec_json is not None: + result['OperationSpecJson'] = self.operation_spec_json + if self.operation_type is not None: + result['OperationType'] = self.operation_type + if self.reason_code is not None: + result['ReasonCode'] = self.reason_code + if self.reason_message is not None: + result['ReasonMessage'] = self.reason_message if self.request_id is not None: result['RequestId'] = self.request_id - if self.snapshot_id is not None: - 
result['SnapshotId'] = self.snapshot_id - if self.spec is not None: - result['Spec'] = self.spec.to_map() - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id - if self.version is not None: - result['Version'] = self.version - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.status is not None: + result['Status'] = self.status return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Description') is not None: - self.description = m.get('Description') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') + if m.get('CreatorId') is not None: + self.creator_id = m.get('CreatorId') + if m.get('GmtCreatedTime') is not None: + self.gmt_created_time = m.get('GmtCreatedTime') + if m.get('GmtEndTime') is not None: + self.gmt_end_time = m.get('GmtEndTime') if m.get('GmtModifiedTime') is not None: self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Provider') is not None: - self.provider = m.get('Provider') + if m.get('GmtStartTime') is not None: + self.gmt_start_time = m.get('GmtStartTime') + if m.get('ObjectId') is not None: + self.object_id = m.get('ObjectId') + if m.get('ObjectType') is not None: + self.object_type = m.get('ObjectType') + if m.get('OperationDescription') is not None: + self.operation_description = m.get('OperationDescription') + if m.get('OperationId') is not None: + self.operation_id = m.get('OperationId') + if m.get('OperationSpecJson') is not None: + self.operation_spec_json = m.get('OperationSpecJson') + if m.get('OperationType') is not None: + self.operation_type = m.get('OperationType') + if m.get('ReasonCode') is not None: + self.reason_code = 
m.get('ReasonCode') + if m.get('ReasonMessage') is not None: + self.reason_message = m.get('ReasonMessage') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('Spec') is not None: - temp_model = ComponentSpec() - self.spec = temp_model.from_map(m['Spec']) - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('Version') is not None: - self.version = m.get('Version') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Status') is not None: + self.status = m.get('Status') return self -class GetComponentVersionResponse(TeaModel): +class GetOperationResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetComponentVersionResponseBody = None, + body: GetOperationResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8728,35 +9513,31 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetComponentVersionResponseBody() + temp_model = GetOperationResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetComponentVersionSnapshotResponseBody(TeaModel): +class GetQueueInfosRequest(TeaModel): def __init__( self, - component_id: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - is_current_version: bool = None, - request_id: str = None, - snapshot_id: str = None, - tenant_id: str = None, - user_id: str = None, - version: str = None, - workspace_id: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + quota_ids: str = None, + sort_by: str = None, + workload_ids: str = None, + workload_type: str = None, + workspace_ids: str = None, ): - self.component_id = component_id - 
self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.is_current_version = is_current_version - self.request_id = request_id - self.snapshot_id = snapshot_id - self.tenant_id = tenant_id - self.user_id = user_id - self.version = version - self.workspace_id = workspace_id + self.order = order + self.page_number = page_number + self.page_size = page_size + self.quota_ids = quota_ids + self.sort_by = sort_by + self.workload_ids = workload_ids + self.workload_type = workload_type + self.workspace_ids = workspace_ids def validate(self): pass @@ -8767,121 +9548,61 @@ def to_map(self): return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.is_current_version is not None: - result['IsCurrentVersion'] = self.is_current_version - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id - if self.version is not None: - result['Version'] = self.version - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.quota_ids is not None: + result['QuotaIds'] = self.quota_ids + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.workload_ids is not None: + result['WorkloadIds'] = self.workload_ids + if self.workload_type is not None: + result['WorkloadType'] = self.workload_type + if self.workspace_ids is not None: + 
result['WorkspaceIds'] = self.workspace_ids return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('IsCurrentVersion') is not None: - self.is_current_version = m.get('IsCurrentVersion') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('Version') is not None: - self.version = m.get('Version') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class GetComponentVersionSnapshotResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetComponentVersionSnapshotResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetComponentVersionSnapshotResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('Order') is not None: + self.order = 
m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('QuotaIds') is not None: + self.quota_ids = m.get('QuotaIds') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('WorkloadIds') is not None: + self.workload_ids = m.get('WorkloadIds') + if m.get('WorkloadType') is not None: + self.workload_type = m.get('WorkloadType') + if m.get('WorkspaceIds') is not None: + self.workspace_ids = m.get('WorkspaceIds') return self -class GetInstanceJobResponseBody(TeaModel): +class GetQueueInfosResponseBody(TeaModel): def __init__( self, - creator: str = None, - gmt_create_time: str = None, - instance_id: str = None, - instance_job_id: str = None, - instance_job_type: str = None, - reason_code: str = None, - reason_message: str = None, + queue_infos: List[QueueInfo] = None, request_id: str = None, - status: str = None, - workspace_id: str = None, + total_count: int = None, ): - self.creator = creator - self.gmt_create_time = gmt_create_time - self.instance_id = instance_id - self.instance_job_id = instance_job_id - self.instance_job_type = instance_job_type - self.reason_code = reason_code - self.reason_message = reason_message + self.queue_infos = queue_infos self.request_id = request_id - self.status = status - self.workspace_id = workspace_id + self.total_count = total_count def validate(self): - pass + if self.queue_infos: + for k in self.queue_infos: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -8889,59 +9610,36 @@ def to_map(self): return _map result = dict() - if self.creator is not None: - result['Creator'] = self.creator - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.instance_id is not None: - result['InstanceId'] = self.instance_id - if self.instance_job_id is not None: - result['InstanceJobId'] = self.instance_job_id - if self.instance_job_type is 
not None: - result['InstanceJobType'] = self.instance_job_type - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message + result['QueueInfos'] = [] + if self.queue_infos is not None: + for k in self.queue_infos: + result['QueueInfos'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id - if self.status is not None: - result['Status'] = self.status - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Creator') is not None: - self.creator = m.get('Creator') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('InstanceId') is not None: - self.instance_id = m.get('InstanceId') - if m.get('InstanceJobId') is not None: - self.instance_job_id = m.get('InstanceJobId') - if m.get('InstanceJobType') is not None: - self.instance_job_type = m.get('InstanceJobType') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') + self.queue_infos = [] + if m.get('QueueInfos') is not None: + for k in m.get('QueueInfos'): + temp_model = QueueInfo() + self.queue_infos.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Status') is not None: - self.status = m.get('Status') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return self -class GetInstanceJobResponse(TeaModel): +class GetQueueInfosResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetInstanceJobResponseBody = 
None, + body: GetQueueInfosResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -8972,29 +9670,17 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetInstanceJobResponseBody() + temp_model = GetQueueInfosResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetJobViewMetricsRequest(TeaModel): +class GetQuotaRequest(TeaModel): def __init__( self, - end_time: str = None, - page_number: int = None, - page_size: int = None, - sort_by: str = None, - start_time: str = None, - time_step: str = None, - workspace_id: str = None, + verbose: bool = None, ): - self.end_time = end_time - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.start_time = start_time - self.time_step = time_step - self.workspace_id = workspace_id + self.verbose = verbose def validate(self): pass @@ -9005,61 +9691,86 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.time_step is not None: - result['TimeStep'] = self.time_step - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.verbose is not None: + result['Verbose'] = self.verbose return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('StartTime') is 
not None: - self.start_time = m.get('StartTime') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Verbose') is not None: + self.verbose = m.get('Verbose') return self -class GetJobViewMetricsResponseBody(TeaModel): +class GetQuotaResponseBody(TeaModel): def __init__( self, - job_metrics: List[JobViewMetric] = None, + allocate_strategy: str = None, + creator_id: str = None, + description: str = None, + gmt_created_time: str = None, + gmt_modified_time: str = None, + labels: List[Label] = None, + latest_operation_id: str = None, + min: ResourceSpec = None, + parent_quota_id: str = None, + queue_strategy: str = None, + quota_config: QuotaConfig = None, + quota_details: QuotaDetails = None, + quota_id: str = None, + quota_name: str = None, + reason_code: str = None, + reason_message: str = None, request_id: str = None, - summary: JobViewMetric = None, - total: int = None, + resource_group_ids: List[str] = None, + resource_type: str = None, + status: str = None, + sub_quotas: List[QuotaIdName] = None, + workspaces: List[WorkspaceIdName] = None, ): - self.job_metrics = job_metrics + self.allocate_strategy = allocate_strategy + self.creator_id = creator_id + self.description = description + self.gmt_created_time = gmt_created_time + self.gmt_modified_time = gmt_modified_time + self.labels = labels + self.latest_operation_id = latest_operation_id + self.min = min + self.parent_quota_id = parent_quota_id + self.queue_strategy = queue_strategy + self.quota_config = quota_config + self.quota_details = quota_details + # Quota Id + self.quota_id = quota_id + self.quota_name = quota_name + self.reason_code = reason_code + self.reason_message = reason_message self.request_id = request_id - self.summary = summary - self.total = total - - def validate(self): - if self.job_metrics: - for k in self.job_metrics: + self.resource_group_ids = resource_group_ids + 
self.resource_type = resource_type + self.status = status + self.sub_quotas = sub_quotas + self.workspaces = workspaces + + def validate(self): + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.min: + self.min.validate() + if self.quota_config: + self.quota_config.validate() + if self.quota_details: + self.quota_details.validate() + if self.sub_quotas: + for k in self.sub_quotas: + if k: + k.validate() + if self.workspaces: + for k in self.workspaces: if k: k.validate() - if self.summary: - self.summary.validate() def to_map(self): _map = super().to_map() @@ -9067,41 +9778,125 @@ def to_map(self): return _map result = dict() - result['JobMetrics'] = [] - if self.job_metrics is not None: - for k in self.job_metrics: - result['JobMetrics'].append(k.to_map() if k else None) + if self.allocate_strategy is not None: + result['AllocateStrategy'] = self.allocate_strategy + if self.creator_id is not None: + result['CreatorId'] = self.creator_id + if self.description is not None: + result['Description'] = self.description + if self.gmt_created_time is not None: + result['GmtCreatedTime'] = self.gmt_created_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.latest_operation_id is not None: + result['LatestOperationId'] = self.latest_operation_id + if self.min is not None: + result['Min'] = self.min.to_map() + if self.parent_quota_id is not None: + result['ParentQuotaId'] = self.parent_quota_id + if self.queue_strategy is not None: + result['QueueStrategy'] = self.queue_strategy + if self.quota_config is not None: + result['QuotaConfig'] = self.quota_config.to_map() + if self.quota_details is not None: + result['QuotaDetails'] = self.quota_details.to_map() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.quota_name is not None: + 
result['QuotaName'] = self.quota_name + if self.reason_code is not None: + result['ReasonCode'] = self.reason_code + if self.reason_message is not None: + result['ReasonMessage'] = self.reason_message if self.request_id is not None: result['RequestId'] = self.request_id - if self.summary is not None: - result['Summary'] = self.summary.to_map() - if self.total is not None: - result['Total'] = self.total + if self.resource_group_ids is not None: + result['ResourceGroupIds'] = self.resource_group_ids + if self.resource_type is not None: + result['ResourceType'] = self.resource_type + if self.status is not None: + result['Status'] = self.status + result['SubQuotas'] = [] + if self.sub_quotas is not None: + for k in self.sub_quotas: + result['SubQuotas'].append(k.to_map() if k else None) + result['Workspaces'] = [] + if self.workspaces is not None: + for k in self.workspaces: + result['Workspaces'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - self.job_metrics = [] - if m.get('JobMetrics') is not None: - for k in m.get('JobMetrics'): - temp_model = JobViewMetric() - self.job_metrics.append(temp_model.from_map(k)) + if m.get('AllocateStrategy') is not None: + self.allocate_strategy = m.get('AllocateStrategy') + if m.get('CreatorId') is not None: + self.creator_id = m.get('CreatorId') + if m.get('Description') is not None: + self.description = m.get('Description') + if m.get('GmtCreatedTime') is not None: + self.gmt_created_time = m.get('GmtCreatedTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('LatestOperationId') is not None: + self.latest_operation_id = m.get('LatestOperationId') + if m.get('Min') is not None: + temp_model = ResourceSpec() + self.min = temp_model.from_map(m['Min']) + if 
m.get('ParentQuotaId') is not None: + self.parent_quota_id = m.get('ParentQuotaId') + if m.get('QueueStrategy') is not None: + self.queue_strategy = m.get('QueueStrategy') + if m.get('QuotaConfig') is not None: + temp_model = QuotaConfig() + self.quota_config = temp_model.from_map(m['QuotaConfig']) + if m.get('QuotaDetails') is not None: + temp_model = QuotaDetails() + self.quota_details = temp_model.from_map(m['QuotaDetails']) + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('QuotaName') is not None: + self.quota_name = m.get('QuotaName') + if m.get('ReasonCode') is not None: + self.reason_code = m.get('ReasonCode') + if m.get('ReasonMessage') is not None: + self.reason_message = m.get('ReasonMessage') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Summary') is not None: - temp_model = JobViewMetric() - self.summary = temp_model.from_map(m['Summary']) - if m.get('Total') is not None: - self.total = m.get('Total') + if m.get('ResourceGroupIds') is not None: + self.resource_group_ids = m.get('ResourceGroupIds') + if m.get('ResourceType') is not None: + self.resource_type = m.get('ResourceType') + if m.get('Status') is not None: + self.status = m.get('Status') + self.sub_quotas = [] + if m.get('SubQuotas') is not None: + for k in m.get('SubQuotas'): + temp_model = QuotaIdName() + self.sub_quotas.append(temp_model.from_map(k)) + self.workspaces = [] + if m.get('Workspaces') is not None: + for k in m.get('Workspaces'): + temp_model = WorkspaceIdName() + self.workspaces.append(temp_model.from_map(k)) return self -class GetJobViewMetricsResponse(TeaModel): +class GetQuotaResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetJobViewMetricsResponseBody = None, + body: GetQuotaResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9132,20 +9927,30 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: 
self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetJobViewMetricsResponseBody() + temp_model = GetQuotaResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetJobsStatisticsByQuotaRequest(TeaModel): +class GetQuotaJobViewMetricsRequest(TeaModel): def __init__( self, end_time: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, start_time: str = None, + time_step: str = None, workspace_id: str = None, ): self.end_time = end_time + self.order = order + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by self.start_time = start_time + self.time_step = time_step self.workspace_id = workspace_id def validate(self): @@ -9159,8 +9964,18 @@ def to_map(self): result = dict() if self.end_time is not None: result['EndTime'] = self.end_time + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by if self.start_time is not None: result['StartTime'] = self.start_time + if self.time_step is not None: + result['TimeStep'] = self.time_step if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result @@ -9169,26 +9984,45 @@ def from_map(self, m: dict = None): m = m or dict() if m.get('EndTime') is not None: self.end_time = m.get('EndTime') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') if m.get('StartTime') is not None: self.start_time = m.get('StartTime') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') if m.get('WorkspaceId') is not None: 
self.workspace_id = m.get('WorkspaceId') return self -class GetJobsStatisticsByQuotaResponseBody(TeaModel): +class GetQuotaJobViewMetricsResponseBody(TeaModel): def __init__( self, + job_metrics: List[QuotaJobViewMetric] = None, quota_id: str = None, request_id: str = None, - statistics: Dict[str, Any] = None, + summary: QuotaJobViewMetric = None, + total_count: int = None, ): + self.job_metrics = job_metrics self.quota_id = quota_id self.request_id = request_id - self.statistics = statistics + self.summary = summary + self.total_count = total_count def validate(self): - pass + if self.job_metrics: + for k in self.job_metrics: + if k: + k.validate() + if self.summary: + self.summary.validate() def to_map(self): _map = super().to_map() @@ -9196,31 +10030,45 @@ def to_map(self): return _map result = dict() + result['JobMetrics'] = [] + if self.job_metrics is not None: + for k in self.job_metrics: + result['JobMetrics'].append(k.to_map() if k else None) if self.quota_id is not None: result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id - if self.statistics is not None: - result['Statistics'] = self.statistics + if self.summary is not None: + result['Summary'] = self.summary.to_map() + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() + self.job_metrics = [] + if m.get('JobMetrics') is not None: + for k in m.get('JobMetrics'): + temp_model = QuotaJobViewMetric() + self.job_metrics.append(temp_model.from_map(k)) if m.get('QuotaId') is not None: self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Statistics') is not None: - self.statistics = m.get('Statistics') + if m.get('Summary') is not None: + temp_model = QuotaJobViewMetric() + self.summary = temp_model.from_map(m['Summary']) + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return 
self -class GetJobsStatisticsByQuotaResponse(TeaModel): +class GetQuotaJobViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetJobsStatisticsByQuotaResponseBody = None, + body: GetQuotaJobViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9251,21 +10099,23 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetJobsStatisticsByQuotaResponseBody() + temp_model = GetQuotaJobViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetJobsStatisticsByResourceGroupRequest(TeaModel): +class GetQuotaMetricsRequest(TeaModel): def __init__( self, end_time: str = None, + gputype: str = None, start_time: str = None, - workspace_id: str = None, + time_step: str = None, ): self.end_time = end_time + self.gputype = gputype self.start_time = start_time - self.workspace_id = workspace_id + self.time_step = time_step def validate(self): pass @@ -9278,34 +10128,43 @@ def to_map(self): result = dict() if self.end_time is not None: result['EndTime'] = self.end_time + if self.gputype is not None: + result['GPUType'] = self.gputype if self.start_time is not None: result['StartTime'] = self.start_time - if self.workspace_id is not None: - result['WorkspaceID'] = self.workspace_id + if self.time_step is not None: + result['TimeStep'] = self.time_step return result def from_map(self, m: dict = None): m = m or dict() if m.get('EndTime') is not None: self.end_time = m.get('EndTime') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') if m.get('StartTime') is not None: self.start_time = m.get('StartTime') - if m.get('WorkspaceID') is not None: - self.workspace_id = m.get('WorkspaceID') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') return self -class GetJobsStatisticsByResourceGroupResponseBody(TeaModel): 
+class GetQuotaMetricsResponseBody(TeaModel): def __init__( self, + quota_id: str = None, + quota_metrics: List[QuotaMetric] = None, request_id: str = None, - statistics: Dict[str, Any] = None, ): + self.quota_id = quota_id + self.quota_metrics = quota_metrics self.request_id = request_id - self.statistics = statistics def validate(self): - pass + if self.quota_metrics: + for k in self.quota_metrics: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -9313,27 +10172,36 @@ def to_map(self): return _map result = dict() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + result['QuotaMetrics'] = [] + if self.quota_metrics is not None: + for k in self.quota_metrics: + result['QuotaMetrics'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id - if self.statistics is not None: - result['Statistics'] = self.statistics return result def from_map(self, m: dict = None): m = m or dict() + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + self.quota_metrics = [] + if m.get('QuotaMetrics') is not None: + for k in m.get('QuotaMetrics'): + temp_model = QuotaMetric() + self.quota_metrics.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Statistics') is not None: - self.statistics = m.get('Statistics') return self -class GetJobsStatisticsByResourceGroupResponse(TeaModel): +class GetQuotaMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetJobsStatisticsByResourceGroupResponseBody = None, + body: GetQuotaMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9364,52 +10232,25 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetJobsStatisticsByResourceGroupResponseBody() + temp_model = 
GetQuotaMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetLLMProjectResponseBodyLabels(TeaModel): - def __init__( - self, - key: str = None, - value: str = None, - ): - self.key = key - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetLLMProjectResponseBodyRuntime(TeaModel): +class GetQuotaNodeMetricsRequest(TeaModel): def __init__( self, - runtime_id: str = None, - runtime_type: str = None, + end_time: str = None, + gputype: str = None, + start_time: str = None, + time_step: str = None, + verbose: bool = None, ): - self.runtime_id = runtime_id - self.runtime_type = runtime_type + self.end_time = end_time + self.gputype = gputype + self.start_time = start_time + self.time_step = time_step + self.verbose = verbose def validate(self): pass @@ -9420,59 +10261,51 @@ def to_map(self): return _map result = dict() - if self.runtime_id is not None: - result['RuntimeId'] = self.runtime_id - if self.runtime_type is not None: - result['RuntimeType'] = self.runtime_type + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.verbose is not None: + result['Verbose'] = self.verbose return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RuntimeId') is not None: - self.runtime_id = m.get('RuntimeId') - if m.get('RuntimeType') is not None: - self.runtime_type = 
m.get('RuntimeType') + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('Verbose') is not None: + self.verbose = m.get('Verbose') return self -class GetLLMProjectResponseBody(TeaModel): +class GetQuotaNodeMetricsResponseBody(TeaModel): def __init__( self, - gmt_create_time: str = None, - gmt_modified_time: str = None, - labels: List[GetLLMProjectResponseBodyLabels] = None, - owner_id: str = None, - project_description: str = None, - project_id: str = None, - project_name: str = None, - project_type: str = None, + metric_type: str = None, + nodes_metrics: List[NodeMetric] = None, + quota_id: str = None, request_id: str = None, - root_path: str = None, - runtime: GetLLMProjectResponseBodyRuntime = None, - user_id: str = None, - workspace_id: str = None, ): - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.owner_id = owner_id - self.project_description = project_description - self.project_id = project_id - self.project_name = project_name - self.project_type = project_type + self.metric_type = metric_type + self.nodes_metrics = nodes_metrics + self.quota_id = quota_id self.request_id = request_id - self.root_path = root_path - self.runtime = runtime - self.user_id = user_id - self.workspace_id = workspace_id def validate(self): - if self.labels: - for k in self.labels: + if self.nodes_metrics: + for k in self.nodes_metrics: if k: k.validate() - if self.runtime: - self.runtime.validate() def to_map(self): _map = super().to_map() @@ -9480,77 +10313,40 @@ def to_map(self): return _map result = dict() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = 
self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.project_description is not None: - result['ProjectDescription'] = self.project_description - if self.project_id is not None: - result['ProjectId'] = self.project_id - if self.project_name is not None: - result['ProjectName'] = self.project_name - if self.project_type is not None: - result['ProjectType'] = self.project_type + if self.metric_type is not None: + result['MetricType'] = self.metric_type + result['NodesMetrics'] = [] + if self.nodes_metrics is not None: + for k in self.nodes_metrics: + result['NodesMetrics'].append(k.to_map() if k else None) + if self.quota_id is not None: + result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id - if self.root_path is not None: - result['RootPath'] = self.root_path - if self.runtime is not None: - result['Runtime'] = self.runtime.to_map() - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = GetLLMProjectResponseBodyLabels() - self.labels.append(temp_model.from_map(k)) - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ProjectDescription') is not None: - self.project_description = m.get('ProjectDescription') - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') - if m.get('ProjectName') is not None: - self.project_name = m.get('ProjectName') - if 
m.get('ProjectType') is not None: - self.project_type = m.get('ProjectType') + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + self.nodes_metrics = [] + if m.get('NodesMetrics') is not None: + for k in m.get('NodesMetrics'): + temp_model = NodeMetric() + self.nodes_metrics.append(temp_model.from_map(k)) + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('RootPath') is not None: - self.root_path = m.get('RootPath') - if m.get('Runtime') is not None: - temp_model = GetLLMProjectResponseBodyRuntime() - self.runtime = temp_model.from_map(m['Runtime']) - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') return self -class GetLLMProjectResponse(TeaModel): +class GetQuotaNodeMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetLLMProjectResponseBody = None, + body: GetQuotaNodeMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9581,21 +10377,37 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetLLMProjectResponseBody() + temp_model = GetQuotaNodeMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetLLMServiceIdentityRoleResponseBody(TeaModel): +class GetQuotaNodeViewMetricsRequest(TeaModel): def __init__( self, - exist: bool = None, - request_id: str = None, - role_name: str = None, + node_id: str = None, + node_status: str = None, + order: str = None, + order_status: str = None, + page_number: int = None, + page_size: int = None, + resource_group_id: str = None, + self_only: bool = None, + sort_by: str = None, + time_step: str = None, + workspace_id: str = None, ): - self.exist = exist - self.request_id = 
request_id - self.role_name = role_name + self.node_id = node_id + self.node_status = node_status + self.order = order + self.order_status = order_status + self.page_number = page_number + self.page_size = page_size + self.resource_group_id = resource_group_id + self.self_only = self_only + self.sort_by = sort_by + self.time_step = time_step + self.workspace_id = workspace_id def validate(self): pass @@ -9606,38 +10418,123 @@ def to_map(self): return _map result = dict() - if self.exist is not None: - result['Exist'] = self.exist - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.role_name is not None: - result['RoleName'] = self.role_name + if self.node_id is not None: + result['NodeId'] = self.node_id + if self.node_status is not None: + result['NodeStatus'] = self.node_status + if self.order is not None: + result['Order'] = self.order + if self.order_status is not None: + result['OrderStatus'] = self.order_status + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.resource_group_id is not None: + result['ResourceGroupId'] = self.resource_group_id + if self.self_only is not None: + result['SelfOnly'] = self.self_only + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Exist') is not None: - self.exist = m.get('Exist') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('RoleName') is not None: - self.role_name = m.get('RoleName') + if m.get('NodeId') is not None: + self.node_id = m.get('NodeId') + if m.get('NodeStatus') is not None: + self.node_status = m.get('NodeStatus') + if m.get('Order') is not None: + self.order = m.get('Order') + if 
m.get('OrderStatus') is not None: + self.order_status = m.get('OrderStatus') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('ResourceGroupId') is not None: + self.resource_group_id = m.get('ResourceGroupId') + if m.get('SelfOnly') is not None: + self.self_only = m.get('SelfOnly') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class GetLLMServiceIdentityRoleResponse(TeaModel): +class GetQuotaNodeViewMetricsResponseBody(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetLLMServiceIdentityRoleResponseBody = None, + node_metrics: List[QuotaNodeViewMetric] = None, + quota_id: str = None, + request_id: str = None, + total_count: int = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.node_metrics = node_metrics + self.quota_id = quota_id + self.request_id = request_id + self.total_count = total_count def validate(self): - if self.body: + if self.node_metrics: + for k in self.node_metrics: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + result['NodeMetrics'] = [] + if self.node_metrics is not None: + for k in self.node_metrics: + result['NodeMetrics'].append(k.to_map() if k else None) + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count + return result + + def from_map(self, m: dict = None): + m = m or dict() + self.node_metrics = [] + if m.get('NodeMetrics') is not None: + for k in m.get('NodeMetrics'): + temp_model = 
QuotaNodeViewMetric() + self.node_metrics.append(temp_model.from_map(k)) + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') + return self + + +class GetQuotaNodeViewMetricsResponse(TeaModel): + def __init__( + self, + headers: Dict[str, str] = None, + status_code: int = None, + body: GetQuotaNodeViewMetricsResponseBody = None, + ): + self.headers = headers + self.status_code = status_code + self.body = body + + def validate(self): + if self.body: self.body.validate() def to_map(self): @@ -9661,19 +10558,37 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetLLMServiceIdentityRoleResponseBody() + temp_model = GetQuotaNodeViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetLLMSnapshotResponseBodyContentStorage(TeaModel): +class GetQuotaQueueInfoRequest(TeaModel): def __init__( self, - location: str = None, - type: str = None, + before_workload_id: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + show_own: bool = None, + sort_by: str = None, + status: str = None, + sub_quota_ids: str = None, + workload_ids: str = None, + workload_type: str = None, + workspace_ids: str = None, ): - self.location = location - self.type = type + self.before_workload_id = before_workload_id + self.order = order + self.page_number = page_number + self.page_size = page_size + self.show_own = show_own + self.sort_by = sort_by + self.status = status + self.sub_quota_ids = sub_quota_ids + self.workload_ids = workload_ids + self.workload_type = workload_type + self.workspace_ids = workspace_ids def validate(self): pass @@ -9684,45 +10599,73 @@ def to_map(self): return _map result = dict() - if self.location is not None: - result['Location'] 
= self.location - if self.type is not None: - result['Type'] = self.type + if self.before_workload_id is not None: + result['BeforeWorkloadId'] = self.before_workload_id + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.show_own is not None: + result['ShowOwn'] = self.show_own + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.status is not None: + result['Status'] = self.status + if self.sub_quota_ids is not None: + result['SubQuotaIds'] = self.sub_quota_ids + if self.workload_ids is not None: + result['WorkloadIds'] = self.workload_ids + if self.workload_type is not None: + result['WorkloadType'] = self.workload_type + if self.workspace_ids is not None: + result['WorkspaceIds'] = self.workspace_ids return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Location') is not None: - self.location = m.get('Location') - if m.get('Type') is not None: - self.type = m.get('Type') + if m.get('BeforeWorkloadId') is not None: + self.before_workload_id = m.get('BeforeWorkloadId') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('ShowOwn') is not None: + self.show_own = m.get('ShowOwn') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('SubQuotaIds') is not None: + self.sub_quota_ids = m.get('SubQuotaIds') + if m.get('WorkloadIds') is not None: + self.workload_ids = m.get('WorkloadIds') + if m.get('WorkloadType') is not None: + self.workload_type = m.get('WorkloadType') + if m.get('WorkspaceIds') is not None: + self.workspace_ids = m.get('WorkspaceIds') return self -class 
GetLLMSnapshotResponseBody(TeaModel): +class GetQuotaQueueInfoResponseBody(TeaModel): def __init__( self, - content_storage: GetLLMSnapshotResponseBodyContentStorage = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - owner_id: str = None, - project_id: str = None, + queue_infos: List[QueueInfo] = None, request_id: str = None, - snapshot_id: str = None, - user_id: str = None, + total_count: int = None, ): - self.content_storage = content_storage - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.owner_id = owner_id - self.project_id = project_id + self.queue_infos = queue_infos self.request_id = request_id - self.snapshot_id = snapshot_id - self.user_id = user_id + self.total_count = total_count def validate(self): - if self.content_storage: - self.content_storage.validate() + if self.queue_infos: + for k in self.queue_infos: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -9730,52 +10673,36 @@ def to_map(self): return _map result = dict() - if self.content_storage is not None: - result['ContentStorage'] = self.content_storage.to_map() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.project_id is not None: - result['ProjectId'] = self.project_id + result['QueueInfos'] = [] + if self.queue_infos is not None: + for k in self.queue_infos: + result['QueueInfos'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.user_id is not None: - result['UserId'] = self.user_id + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ContentStorage') is not 
None: - temp_model = GetLLMSnapshotResponseBodyContentStorage() - self.content_storage = temp_model.from_map(m['ContentStorage']) - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') + self.queue_infos = [] + if m.get('QueueInfos') is not None: + for k in m.get('QueueInfos'): + temp_model = QueueInfo() + self.queue_infos.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return self -class GetLLMSnapshotResponse(TeaModel): +class GetQuotaQueueInfoResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetLLMSnapshotResponseBody = None, + body: GetQuotaQueueInfoResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9806,49 +10733,31 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetLLMSnapshotResponseBody() + temp_model = GetQuotaQueueInfoResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetMachineGroupResponseBody(TeaModel): +class GetQuotaRangeUserViewMetricsRequest(TeaModel): def __init__( self, - count: int = None, - default_driver: str = None, - duration: str = None, - ecs_type: str = None, - gmt_created: str = None, - gmt_expired: str = None, - gmt_modified: str = None, - gmt_started: str = None, - machine_group_id: str = None, - order_id: str = None, - pairesource_id: str = None, - pay_type: str = None, - 
pricing_cycle: str = None, - region_id: str = None, - request_id: str = None, - status: str = None, - supported_drivers: List[str] = None, + end_time: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, + start_time: str = None, + user_id: str = None, + workspace_id: str = None, ): - self.count = count - self.default_driver = default_driver - self.duration = duration - self.ecs_type = ecs_type - self.gmt_created = gmt_created - self.gmt_expired = gmt_expired - self.gmt_modified = gmt_modified - self.gmt_started = gmt_started - self.machine_group_id = machine_group_id - self.order_id = order_id - self.pairesource_id = pairesource_id - self.pay_type = pay_type - self.pricing_cycle = pricing_cycle - self.region_id = region_id - self.request_id = request_id - self.status = status - self.supported_drivers = supported_drivers + self.end_time = end_time + self.order = order + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by + self.start_time = start_time + self.user_id = user_id + self.workspace_id = workspace_id def validate(self): pass @@ -9859,87 +10768,113 @@ def to_map(self): return _map result = dict() - if self.count is not None: - result['Count'] = self.count - if self.default_driver is not None: - result['DefaultDriver'] = self.default_driver - if self.duration is not None: - result['Duration'] = self.duration - if self.ecs_type is not None: - result['EcsType'] = self.ecs_type - if self.gmt_created is not None: - result['GmtCreated'] = self.gmt_created - if self.gmt_expired is not None: - result['GmtExpired'] = self.gmt_expired - if self.gmt_modified is not None: - result['GmtModified'] = self.gmt_modified - if self.gmt_started is not None: - result['GmtStarted'] = self.gmt_started - if self.machine_group_id is not None: - result['MachineGroupID'] = self.machine_group_id - if self.order_id is not None: - result['OrderID'] = self.order_id - if self.pairesource_id is not 
None: - result['PAIResourceID'] = self.pairesource_id - if self.pay_type is not None: - result['PayType'] = self.pay_type - if self.pricing_cycle is not None: - result['PricingCycle'] = self.pricing_cycle - if self.region_id is not None: - result['RegionID'] = self.region_id - if self.request_id is not None: + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.user_id is not None: + result['UserId'] = self.user_id + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') + return self + + +class GetQuotaRangeUserViewMetricsResponseBody(TeaModel): + def __init__( + self, + quota_id: str = None, + request_id: str = None, + summary: QuotaUserViewMetric = None, + total_count: int = None, + user_metrics: List[QuotaUserViewMetric] = None, + ): + self.quota_id = quota_id + self.request_id = request_id + self.summary = summary + self.total_count = total_count + self.user_metrics = user_metrics + + def validate(self): + if self.summary: + 
self.summary.validate() + if self.user_metrics: + for k in self.user_metrics: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.request_id is not None: result['RequestId'] = self.request_id - if self.status is not None: - result['Status'] = self.status - if self.supported_drivers is not None: - result['SupportedDrivers'] = self.supported_drivers + if self.summary is not None: + result['Summary'] = self.summary.to_map() + if self.total_count is not None: + result['TotalCount'] = self.total_count + result['UserMetrics'] = [] + if self.user_metrics is not None: + for k in self.user_metrics: + result['UserMetrics'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Count') is not None: - self.count = m.get('Count') - if m.get('DefaultDriver') is not None: - self.default_driver = m.get('DefaultDriver') - if m.get('Duration') is not None: - self.duration = m.get('Duration') - if m.get('EcsType') is not None: - self.ecs_type = m.get('EcsType') - if m.get('GmtCreated') is not None: - self.gmt_created = m.get('GmtCreated') - if m.get('GmtExpired') is not None: - self.gmt_expired = m.get('GmtExpired') - if m.get('GmtModified') is not None: - self.gmt_modified = m.get('GmtModified') - if m.get('GmtStarted') is not None: - self.gmt_started = m.get('GmtStarted') - if m.get('MachineGroupID') is not None: - self.machine_group_id = m.get('MachineGroupID') - if m.get('OrderID') is not None: - self.order_id = m.get('OrderID') - if m.get('PAIResourceID') is not None: - self.pairesource_id = m.get('PAIResourceID') - if m.get('PayType') is not None: - self.pay_type = m.get('PayType') - if m.get('PricingCycle') is not None: - self.pricing_cycle = m.get('PricingCycle') - if m.get('RegionID') is not None: - self.region_id = m.get('RegionID') + if m.get('QuotaId') is not None: 
+ self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Status') is not None: - self.status = m.get('Status') - if m.get('SupportedDrivers') is not None: - self.supported_drivers = m.get('SupportedDrivers') + if m.get('Summary') is not None: + temp_model = QuotaUserViewMetric() + self.summary = temp_model.from_map(m['Summary']) + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') + self.user_metrics = [] + if m.get('UserMetrics') is not None: + for k in m.get('UserMetrics'): + temp_model = QuotaUserViewMetric() + self.user_metrics.append(temp_model.from_map(k)) return self -class GetMachineGroupResponse(TeaModel): +class GetQuotaRangeUserViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetMachineGroupResponseBody = None, + body: GetQuotaRangeUserViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -9970,24 +10905,20 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetMachineGroupResponseBody() + temp_model = GetQuotaRangeUserViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetNodeMetricsRequest(TeaModel): +class GetQuotaTopoRequest(TeaModel): def __init__( self, - end_time: str = None, - gputype: str = None, - start_time: str = None, - time_step: str = None, + depth: int = None, + show_own_workloads: bool = None, verbose: bool = None, ): - self.end_time = end_time - self.gputype = gputype - self.start_time = start_time - self.time_step = time_step + self.depth = depth + self.show_own_workloads = show_own_workloads self.verbose = verbose def validate(self): @@ -9999,49 +10930,34 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.gputype is not None: - 
result['GPUType'] = self.gputype - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.time_step is not None: - result['TimeStep'] = self.time_step + if self.depth is not None: + result['Depth'] = self.depth + if self.show_own_workloads is not None: + result['ShowOwnWorkloads'] = self.show_own_workloads if self.verbose is not None: result['Verbose'] = self.verbose return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('GPUType') is not None: - self.gputype = m.get('GPUType') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') + if m.get('Depth') is not None: + self.depth = m.get('Depth') + if m.get('ShowOwnWorkloads') is not None: + self.show_own_workloads = m.get('ShowOwnWorkloads') if m.get('Verbose') is not None: self.verbose = m.get('Verbose') return self -class GetNodeMetricsResponseBody(TeaModel): +class GetQuotaTopoResponseBody(TeaModel): def __init__( self, - metric_type: str = None, - nodes_metrics: List[NodeMetric] = None, - resource_group_id: str = None, + request_id: str = None, ): - self.metric_type = metric_type - self.nodes_metrics = nodes_metrics - self.resource_group_id = resource_group_id + self.request_id = request_id def validate(self): - if self.nodes_metrics: - for k in self.nodes_metrics: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -10049,36 +10965,23 @@ def to_map(self): return _map result = dict() - if self.metric_type is not None: - result['MetricType'] = self.metric_type - result['NodesMetrics'] = [] - if self.nodes_metrics is not None: - for k in self.nodes_metrics: - result['NodesMetrics'].append(k.to_map() if k else None) - if self.resource_group_id is not None: - result['ResourceGroupID'] = self.resource_group_id + if self.request_id is not None: + result['requestId'] = self.request_id 
return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MetricType') is not None: - self.metric_type = m.get('MetricType') - self.nodes_metrics = [] - if m.get('NodesMetrics') is not None: - for k in m.get('NodesMetrics'): - temp_model = NodeMetric() - self.nodes_metrics.append(temp_model.from_map(k)) - if m.get('ResourceGroupID') is not None: - self.resource_group_id = m.get('ResourceGroupID') + if m.get('requestId') is not None: + self.request_id = m.get('requestId') return self -class GetNodeMetricsResponse(TeaModel): +class GetQuotaTopoResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetNodeMetricsResponseBody = None, + body: GetQuotaTopoResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -10109,24 +11012,30 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetNodeMetricsResponseBody() + temp_model = GetQuotaTopoResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetNodeViewMetricsRequest(TeaModel): +class GetQuotaUserViewMetricsRequest(TeaModel): def __init__( self, - node_id: str = None, - page_number: int = None, - page_size: int = None, + order: str = None, + page_number: str = None, + page_size: str = None, + sort_by: str = None, time_step: str = None, + user_id: str = None, workspace_id: str = None, ): - self.node_id = node_id + self.order = order + # This parameter is required. self.page_number = page_number + # This parameter is required. 
self.page_size = page_size + self.sort_by = sort_by self.time_step = time_step + self.user_id = user_id self.workspace_id = workspace_id def validate(self): @@ -10138,45 +11047,61 @@ def to_map(self): return _map result = dict() - if self.node_id is not None: - result['NodeId'] = self.node_id + if self.order is not None: + result['Order'] = self.order if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by if self.time_step is not None: result['TimeStep'] = self.time_step + if self.user_id is not None: + result['UserId'] = self.user_id if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('NodeId') is not None: - self.node_id = m.get('NodeId') + if m.get('Order') is not None: + self.order = m.get('Order') if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') if m.get('TimeStep') is not None: self.time_step = m.get('TimeStep') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class GetNodeViewMetricsResponseBody(TeaModel): +class GetQuotaUserViewMetricsResponseBody(TeaModel): def __init__( self, - node_metrics: List[NodeViewMetric] = None, - total: int = None, + quota_id: str = None, + request_id: str = None, + summary: QuotaUserViewMetric = None, + total_count: int = None, + user_metrics: List[QuotaUserViewMetric] = None, ): - self.node_metrics = node_metrics - self.total = total + self.quota_id = quota_id + self.request_id = request_id + self.summary = summary + self.total_count = total_count + self.user_metrics = user_metrics def validate(self): - if 
self.node_metrics: - for k in self.node_metrics: + if self.summary: + self.summary.validate() + if self.user_metrics: + for k in self.user_metrics: if k: k.validate() @@ -10186,32 +11111,45 @@ def to_map(self): return _map result = dict() - result['NodeMetrics'] = [] - if self.node_metrics is not None: - for k in self.node_metrics: - result['NodeMetrics'].append(k.to_map() if k else None) - if self.total is not None: - result['Total'] = self.total - return result - + if self.quota_id is not None: + result['QuotaId'] = self.quota_id + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.summary is not None: + result['Summary'] = self.summary.to_map() + if self.total_count is not None: + result['TotalCount'] = self.total_count + result['UserMetrics'] = [] + if self.user_metrics is not None: + for k in self.user_metrics: + result['UserMetrics'].append(k.to_map() if k else None) + return result + def from_map(self, m: dict = None): m = m or dict() - self.node_metrics = [] - if m.get('NodeMetrics') is not None: - for k in m.get('NodeMetrics'): - temp_model = NodeViewMetric() - self.node_metrics.append(temp_model.from_map(k)) - if m.get('Total') is not None: - self.total = m.get('Total') + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('Summary') is not None: + temp_model = QuotaUserViewMetric() + self.summary = temp_model.from_map(m['Summary']) + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') + self.user_metrics = [] + if m.get('UserMetrics') is not None: + for k in m.get('UserMetrics'): + temp_model = QuotaUserViewMetric() + self.user_metrics.append(temp_model.from_map(k)) return self -class GetNodeViewMetricsResponse(TeaModel): +class GetQuotaUserViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetNodeViewMetricsResponseBody = None, + 
body: GetQuotaUserViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -10242,48 +11180,98 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetNodeViewMetricsResponseBody() + temp_model = GetQuotaUserViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetOperationResponseBody(TeaModel): +class GetRangeUserViewMetricsRequest(TeaModel): def __init__( self, - creator_id: str = None, - gmt_created_time: str = None, - gmt_end_time: str = None, - gmt_modified_time: str = None, - gmt_start_time: str = None, - object_id: str = None, - object_type: str = None, - operation_description: str = None, - operation_id: str = None, - operation_spec_json: str = None, - operation_type: str = None, - reason_code: str = None, - reason_message: str = None, + end_time: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, + start_time: str = None, + user_id: str = None, + workspace_id: str = None, + ): + self.end_time = end_time + self.order = order + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by + self.start_time = start_time + self.user_id = user_id + self.workspace_id = workspace_id + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.start_time is not None: + result['StartTime'] = self.start_time + if self.user_id is not None: + result['UserId'] = self.user_id + if self.workspace_id is 
not None: + result['WorkspaceId'] = self.workspace_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') + return self + + +class GetRangeUserViewMetricsResponseBody(TeaModel): + def __init__( + self, + summary: UserViewMetric = None, + user_metrics: List[UserViewMetric] = None, request_id: str = None, - status: str = None, ): - self.creator_id = creator_id - self.gmt_created_time = gmt_created_time - self.gmt_end_time = gmt_end_time - self.gmt_modified_time = gmt_modified_time - self.gmt_start_time = gmt_start_time - self.object_id = object_id - self.object_type = object_type - self.operation_description = operation_description - self.operation_id = operation_id - self.operation_spec_json = operation_spec_json - self.operation_type = operation_type - self.reason_code = reason_code - self.reason_message = reason_message + self.summary = summary + self.user_metrics = user_metrics self.request_id = request_id - self.status = status def validate(self): - pass + if self.summary: + self.summary.validate() + if self.user_metrics: + for k in self.user_metrics: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -10291,79 +11279,37 @@ def to_map(self): return _map result = dict() - if self.creator_id is not None: - result['CreatorId'] = self.creator_id - if self.gmt_created_time is not None: - result['GmtCreatedTime'] = self.gmt_created_time - if self.gmt_end_time is 
not None: - result['GmtEndTime'] = self.gmt_end_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.gmt_start_time is not None: - result['GmtStartTime'] = self.gmt_start_time - if self.object_id is not None: - result['ObjectId'] = self.object_id - if self.object_type is not None: - result['ObjectType'] = self.object_type - if self.operation_description is not None: - result['OperationDescription'] = self.operation_description - if self.operation_id is not None: - result['OperationId'] = self.operation_id - if self.operation_spec_json is not None: - result['OperationSpecJson'] = self.operation_spec_json - if self.operation_type is not None: - result['OperationType'] = self.operation_type - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message + if self.summary is not None: + result['Summary'] = self.summary.to_map() + result['UserMetrics'] = [] + if self.user_metrics is not None: + for k in self.user_metrics: + result['UserMetrics'].append(k.to_map() if k else None) if self.request_id is not None: - result['RequestId'] = self.request_id - if self.status is not None: - result['Status'] = self.status + result['requestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('CreatorId') is not None: - self.creator_id = m.get('CreatorId') - if m.get('GmtCreatedTime') is not None: - self.gmt_created_time = m.get('GmtCreatedTime') - if m.get('GmtEndTime') is not None: - self.gmt_end_time = m.get('GmtEndTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('GmtStartTime') is not None: - self.gmt_start_time = m.get('GmtStartTime') - if m.get('ObjectId') is not None: - self.object_id = m.get('ObjectId') - if m.get('ObjectType') is not None: - self.object_type = m.get('ObjectType') - if 
m.get('OperationDescription') is not None: - self.operation_description = m.get('OperationDescription') - if m.get('OperationId') is not None: - self.operation_id = m.get('OperationId') - if m.get('OperationSpecJson') is not None: - self.operation_spec_json = m.get('OperationSpecJson') - if m.get('OperationType') is not None: - self.operation_type = m.get('OperationType') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Status') is not None: - self.status = m.get('Status') + if m.get('Summary') is not None: + temp_model = UserViewMetric() + self.summary = temp_model.from_map(m['Summary']) + self.user_metrics = [] + if m.get('UserMetrics') is not None: + for k in m.get('UserMetrics'): + temp_model = UserViewMetric() + self.user_metrics.append(temp_model.from_map(k)) + if m.get('requestId') is not None: + self.request_id = m.get('requestId') return self -class GetOperationResponse(TeaModel): +class GetRangeUserViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetOperationResponseBody = None, + body: GetRangeUserViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -10394,31 +11340,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetOperationResponseBody() + temp_model = GetRangeUserViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQueueInfosRequest(TeaModel): +class GetResourceGroupRequestTag(TeaModel): def __init__( self, - order: str = None, - page_number: int = None, - page_size: int = None, - quota_ids: str = None, - sort_by: str = None, - workload_ids: str = None, - workload_type: str = None, - 
workspace_ids: str = None, + key: str = None, + value: str = None, ): - self.order = order - self.page_number = page_number - self.page_size = page_size - self.quota_ids = quota_ids - self.sort_by = sort_by - self.workload_ids = workload_ids - self.workload_type = workload_type - self.workspace_ids = workspace_ids + self.key = key + self.value = value def validate(self): pass @@ -10429,59 +11363,33 @@ def to_map(self): return _map result = dict() - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.quota_ids is not None: - result['QuotaIds'] = self.quota_ids - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.workload_ids is not None: - result['WorkloadIds'] = self.workload_ids - if self.workload_type is not None: - result['WorkloadType'] = self.workload_type - if self.workspace_ids is not None: - result['WorkspaceIds'] = self.workspace_ids + if self.key is not None: + result['Key'] = self.key + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('QuotaIds') is not None: - self.quota_ids = m.get('QuotaIds') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('WorkloadIds') is not None: - self.workload_ids = m.get('WorkloadIds') - if m.get('WorkloadType') is not None: - self.workload_type = m.get('WorkloadType') - if m.get('WorkspaceIds') is not None: - self.workspace_ids = m.get('WorkspaceIds') + if m.get('Key') is not None: + self.key = m.get('Key') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class 
GetQueueInfosResponseBody(TeaModel): +class GetResourceGroupRequest(TeaModel): def __init__( self, - queue_infos: List[QueueInfo] = None, - request_id: str = None, - total_count: int = None, + is_aiworkspace_data_enabled: bool = None, + tag: List[GetResourceGroupRequestTag] = None, ): - self.queue_infos = queue_infos - self.request_id = request_id - self.total_count = total_count + self.is_aiworkspace_data_enabled = is_aiworkspace_data_enabled + self.tag = tag def validate(self): - if self.queue_infos: - for k in self.queue_infos: + if self.tag: + for k in self.tag: if k: k.validate() @@ -10491,44 +11399,37 @@ def to_map(self): return _map result = dict() - result['QueueInfos'] = [] - if self.queue_infos is not None: - for k in self.queue_infos: - result['QueueInfos'].append(k.to_map() if k else None) - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + if self.is_aiworkspace_data_enabled is not None: + result['IsAIWorkspaceDataEnabled'] = self.is_aiworkspace_data_enabled + result['Tag'] = [] + if self.tag is not None: + for k in self.tag: + result['Tag'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - self.queue_infos = [] - if m.get('QueueInfos') is not None: - for k in m.get('QueueInfos'): - temp_model = QueueInfo() - self.queue_infos.append(temp_model.from_map(k)) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + if m.get('IsAIWorkspaceDataEnabled') is not None: + self.is_aiworkspace_data_enabled = m.get('IsAIWorkspaceDataEnabled') + self.tag = [] + if m.get('Tag') is not None: + for k in m.get('Tag'): + temp_model = GetResourceGroupRequestTag() + self.tag.append(temp_model.from_map(k)) return self -class GetQueueInfosResponse(TeaModel): +class GetResourceGroupShrinkRequest(TeaModel): def __init__( self, 
- headers: Dict[str, str] = None, - status_code: int = None, - body: GetQueueInfosResponseBody = None, + is_aiworkspace_data_enabled: bool = None, + tag_shrink: str = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.is_aiworkspace_data_enabled = is_aiworkspace_data_enabled + self.tag_shrink = tag_shrink def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -10536,95 +11437,94 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.is_aiworkspace_data_enabled is not None: + result['IsAIWorkspaceDataEnabled'] = self.is_aiworkspace_data_enabled + if self.tag_shrink is not None: + result['Tag'] = self.tag_shrink return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetQueueInfosResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('IsAIWorkspaceDataEnabled') is not None: + self.is_aiworkspace_data_enabled = m.get('IsAIWorkspaceDataEnabled') + if m.get('Tag') is not None: + self.tag_shrink = m.get('Tag') return self -class GetQuotaResponseBody(TeaModel): +class GetResourceGroupResponseBodyTags(TeaModel): def __init__( self, - allocate_strategy: str = None, + tag_key: str = None, + tag_value: str = None, + ): + self.tag_key = tag_key + self.tag_value = tag_value + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.tag_key is not None: + result['TagKey'] = self.tag_key + if self.tag_value is not None: + result['TagValue'] = self.tag_value + return result 
+ + def from_map(self, m: dict = None): + m = m or dict() + if m.get('TagKey') is not None: + self.tag_key = m.get('TagKey') + if m.get('TagValue') is not None: + self.tag_value = m.get('TagValue') + return self + + +class GetResourceGroupResponseBody(TeaModel): + def __init__( + self, + cluster_id: str = None, + computing_resource_provider: str = None, creator_id: str = None, description: str = None, gmt_created_time: str = None, gmt_modified_time: str = None, - labels: List[Label] = None, - latest_operation_id: str = None, - min: ResourceSpec = None, - parent_quota_id: str = None, - queue_strategy: str = None, - quota_config: QuotaConfig = None, - quota_details: QuotaDetails = None, - quota_id: str = None, - quota_name: str = None, - reason_code: str = None, - reason_message: str = None, + name: str = None, request_id: str = None, - resource_group_ids: List[str] = None, resource_type: str = None, status: str = None, - sub_quotas: List[QuotaIdName] = None, - workspaces: List[WorkspaceIdName] = None, + support_rdma: bool = None, + tags: List[GetResourceGroupResponseBodyTags] = None, + user_vpc: UserVpc = None, + workspace_id: str = None, ): - self.allocate_strategy = allocate_strategy + self.cluster_id = cluster_id + self.computing_resource_provider = computing_resource_provider self.creator_id = creator_id self.description = description self.gmt_created_time = gmt_created_time self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.latest_operation_id = latest_operation_id - self.min = min - self.parent_quota_id = parent_quota_id - self.queue_strategy = queue_strategy - self.quota_config = quota_config - self.quota_details = quota_details - # Quota Id - self.quota_id = quota_id - self.quota_name = quota_name - self.reason_code = reason_code - self.reason_message = reason_message + self.name = name self.request_id = request_id - self.resource_group_ids = resource_group_ids self.resource_type = resource_type self.status = status - self.sub_quotas = 
sub_quotas - self.workspaces = workspaces + self.support_rdma = support_rdma + self.tags = tags + self.user_vpc = user_vpc + self.workspace_id = workspace_id def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.min: - self.min.validate() - if self.quota_config: - self.quota_config.validate() - if self.quota_details: - self.quota_details.validate() - if self.sub_quotas: - for k in self.sub_quotas: - if k: - k.validate() - if self.workspaces: - for k in self.workspaces: + if self.tags: + for k in self.tags: if k: k.validate() + if self.user_vpc: + self.user_vpc.validate() def to_map(self): _map = super().to_map() @@ -10632,125 +11532,81 @@ def to_map(self): return _map result = dict() - if self.allocate_strategy is not None: - result['AllocateStrategy'] = self.allocate_strategy + if self.cluster_id is not None: + result['ClusterID'] = self.cluster_id + if self.computing_resource_provider is not None: + result['ComputingResourceProvider'] = self.computing_resource_provider if self.creator_id is not None: - result['CreatorId'] = self.creator_id + result['CreatorID'] = self.creator_id if self.description is not None: result['Description'] = self.description if self.gmt_created_time is not None: result['GmtCreatedTime'] = self.gmt_created_time if self.gmt_modified_time is not None: result['GmtModifiedTime'] = self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.latest_operation_id is not None: - result['LatestOperationId'] = self.latest_operation_id - if self.min is not None: - result['Min'] = self.min.to_map() - if self.parent_quota_id is not None: - result['ParentQuotaId'] = self.parent_quota_id - if self.queue_strategy is not None: - result['QueueStrategy'] = self.queue_strategy - if self.quota_config is not None: - result['QuotaConfig'] = self.quota_config.to_map() - if self.quota_details is not None: - 
result['QuotaDetails'] = self.quota_details.to_map() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - if self.quota_name is not None: - result['QuotaName'] = self.quota_name - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message + if self.name is not None: + result['Name'] = self.name if self.request_id is not None: result['RequestId'] = self.request_id - if self.resource_group_ids is not None: - result['ResourceGroupIds'] = self.resource_group_ids if self.resource_type is not None: result['ResourceType'] = self.resource_type if self.status is not None: result['Status'] = self.status - result['SubQuotas'] = [] - if self.sub_quotas is not None: - for k in self.sub_quotas: - result['SubQuotas'].append(k.to_map() if k else None) - result['Workspaces'] = [] - if self.workspaces is not None: - for k in self.workspaces: - result['Workspaces'].append(k.to_map() if k else None) + if self.support_rdma is not None: + result['SupportRDMA'] = self.support_rdma + result['Tags'] = [] + if self.tags is not None: + for k in self.tags: + result['Tags'].append(k.to_map() if k else None) + if self.user_vpc is not None: + result['UserVpc'] = self.user_vpc.to_map() + if self.workspace_id is not None: + result['WorkspaceID'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AllocateStrategy') is not None: - self.allocate_strategy = m.get('AllocateStrategy') - if m.get('CreatorId') is not None: - self.creator_id = m.get('CreatorId') + if m.get('ClusterID') is not None: + self.cluster_id = m.get('ClusterID') + if m.get('ComputingResourceProvider') is not None: + self.computing_resource_provider = m.get('ComputingResourceProvider') + if m.get('CreatorID') is not None: + self.creator_id = m.get('CreatorID') if m.get('Description') is not None: self.description = m.get('Description') if m.get('GmtCreatedTime') is not 
None: self.gmt_created_time = m.get('GmtCreatedTime') if m.get('GmtModifiedTime') is not None: self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('LatestOperationId') is not None: - self.latest_operation_id = m.get('LatestOperationId') - if m.get('Min') is not None: - temp_model = ResourceSpec() - self.min = temp_model.from_map(m['Min']) - if m.get('ParentQuotaId') is not None: - self.parent_quota_id = m.get('ParentQuotaId') - if m.get('QueueStrategy') is not None: - self.queue_strategy = m.get('QueueStrategy') - if m.get('QuotaConfig') is not None: - temp_model = QuotaConfig() - self.quota_config = temp_model.from_map(m['QuotaConfig']) - if m.get('QuotaDetails') is not None: - temp_model = QuotaDetails() - self.quota_details = temp_model.from_map(m['QuotaDetails']) - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - if m.get('QuotaName') is not None: - self.quota_name = m.get('QuotaName') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') + if m.get('Name') is not None: + self.name = m.get('Name') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('ResourceGroupIds') is not None: - self.resource_group_ids = m.get('ResourceGroupIds') if m.get('ResourceType') is not None: self.resource_type = m.get('ResourceType') if m.get('Status') is not None: self.status = m.get('Status') - self.sub_quotas = [] - if m.get('SubQuotas') is not None: - for k in m.get('SubQuotas'): - temp_model = QuotaIdName() - self.sub_quotas.append(temp_model.from_map(k)) - self.workspaces = [] - if m.get('Workspaces') is not None: - for k in m.get('Workspaces'): - temp_model = WorkspaceIdName() - self.workspaces.append(temp_model.from_map(k)) + if m.get('SupportRDMA') is not 
None: + self.support_rdma = m.get('SupportRDMA') + self.tags = [] + if m.get('Tags') is not None: + for k in m.get('Tags'): + temp_model = GetResourceGroupResponseBodyTags() + self.tags.append(temp_model.from_map(k)) + if m.get('UserVpc') is not None: + temp_model = UserVpc() + self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('WorkspaceID') is not None: + self.workspace_id = m.get('WorkspaceID') return self -class GetQuotaResponse(TeaModel): +class GetResourceGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaResponseBody = None, + body: GetResourceGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -10781,31 +11637,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaResponseBody() + temp_model = GetResourceGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQuotaJobViewMetricsRequest(TeaModel): +class GetResourceGroupMachineGroupRequestTag(TeaModel): def __init__( self, - end_time: str = None, - order: str = None, - page_number: int = None, - page_size: int = None, - sort_by: str = None, - start_time: str = None, - time_step: str = None, - workspace_id: str = None, + key: str = None, + value: str = None, ): - self.end_time = end_time - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.start_time = start_time - self.time_step = time_step - self.workspace_id = workspace_id + self.key = key + self.value = value def validate(self): pass @@ -10816,67 +11660,33 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - 
result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.time_step is not None: - result['TimeStep'] = self.time_step - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.key is not None: + result['Key'] = self.key + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Key') is not None: + self.key = m.get('Key') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class GetQuotaJobViewMetricsResponseBody(TeaModel): +class GetResourceGroupMachineGroupRequest(TeaModel): def __init__( self, - job_metrics: List[QuotaJobViewMetric] = None, - quota_id: str = None, - request_id: str = None, - summary: QuotaJobViewMetric = None, - total_count: int = None, + tag: List[GetResourceGroupMachineGroupRequestTag] = None, ): - self.job_metrics = job_metrics - self.quota_id = quota_id - self.request_id = request_id - self.summary = summary - self.total_count = total_count + self.tag = tag def validate(self): - if self.job_metrics: - for k in self.job_metrics: + if self.tag: + for k in self.tag: if k: k.validate() - if self.summary: - self.summary.validate() def to_map(self): _map = super().to_map() @@ -10884,53 +11694,31 @@ def to_map(self): 
return _map result = dict() - result['JobMetrics'] = [] - if self.job_metrics is not None: - for k in self.job_metrics: - result['JobMetrics'].append(k.to_map() if k else None) - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.summary is not None: - result['Summary'] = self.summary.to_map() - if self.total_count is not None: - result['TotalCount'] = self.total_count + result['Tag'] = [] + if self.tag is not None: + for k in self.tag: + result['Tag'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - self.job_metrics = [] - if m.get('JobMetrics') is not None: - for k in m.get('JobMetrics'): - temp_model = QuotaJobViewMetric() - self.job_metrics.append(temp_model.from_map(k)) - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Summary') is not None: - temp_model = QuotaJobViewMetric() - self.summary = temp_model.from_map(m['Summary']) - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + self.tag = [] + if m.get('Tag') is not None: + for k in m.get('Tag'): + temp_model = GetResourceGroupMachineGroupRequestTag() + self.tag.append(temp_model.from_map(k)) return self -class GetQuotaJobViewMetricsResponse(TeaModel): +class GetResourceGroupMachineGroupShrinkRequest(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetQuotaJobViewMetricsResponseBody = None, + tag_shrink: str = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.tag_shrink = tag_shrink def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -10938,38 +11726,25 @@ def to_map(self): return _map result = dict() - if self.headers is not None: - result['headers'] = self.headers - if 
self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.tag_shrink is not None: + result['Tag'] = self.tag_shrink return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetQuotaJobViewMetricsResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('Tag') is not None: + self.tag_shrink = m.get('Tag') return self -class GetQuotaMetricsRequest(TeaModel): +class GetResourceGroupMachineGroupResponseBodyTags(TeaModel): def __init__( self, - end_time: str = None, - gputype: str = None, - start_time: str = None, - time_step: str = None, + tag_key: str = None, + tag_value: str = None, ): - self.end_time = end_time - self.gputype = gputype - self.start_time = start_time - self.time_step = time_step + self.tag_key = tag_key + self.tag_value = tag_value def validate(self): pass @@ -10980,43 +11755,71 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.gputype is not None: - result['GPUType'] = self.gputype - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.time_step is not None: - result['TimeStep'] = self.time_step + if self.tag_key is not None: + result['TagKey'] = self.tag_key + if self.tag_value is not None: + result['TagValue'] = self.tag_value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('GPUType') is not None: - self.gputype = m.get('GPUType') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') + if m.get('TagKey') is not None: + self.tag_key = m.get('TagKey') + 
if m.get('TagValue') is not None: + self.tag_value = m.get('TagValue') return self -class GetQuotaMetricsResponseBody(TeaModel): +class GetResourceGroupMachineGroupResponseBody(TeaModel): def __init__( self, - quota_id: str = None, - quota_metrics: List[QuotaMetric] = None, + cpu: str = None, + default_driver: str = None, + ecs_count: int = None, + ecs_spec: str = None, + gmt_created_time: str = None, + gmt_expired_time: str = None, + gmt_modified_time: str = None, + gmt_started_time: str = None, + gpu: str = None, + gpu_type: str = None, + machine_group_id: str = None, + memory: str = None, + name: str = None, + payment_duration: str = None, + payment_duration_unit: str = None, + payment_type: str = None, request_id: str = None, + resource_group_id: str = None, + status: str = None, + supported_drivers: List[str] = None, + tags: List[GetResourceGroupMachineGroupResponseBodyTags] = None, ): - self.quota_id = quota_id - self.quota_metrics = quota_metrics + self.cpu = cpu + self.default_driver = default_driver + self.ecs_count = ecs_count + self.ecs_spec = ecs_spec + self.gmt_created_time = gmt_created_time + self.gmt_expired_time = gmt_expired_time + self.gmt_modified_time = gmt_modified_time + self.gmt_started_time = gmt_started_time + self.gpu = gpu + self.gpu_type = gpu_type + self.machine_group_id = machine_group_id + self.memory = memory + self.name = name + self.payment_duration = payment_duration + self.payment_duration_unit = payment_duration_unit + self.payment_type = payment_type self.request_id = request_id + self.resource_group_id = resource_group_id + self.status = status + self.supported_drivers = supported_drivers + self.tags = tags def validate(self): - if self.quota_metrics: - for k in self.quota_metrics: + if self.tags: + for k in self.tags: if k: k.validate() @@ -11026,36 +11829,108 @@ def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - result['QuotaMetrics'] = [] - if 
self.quota_metrics is not None: - for k in self.quota_metrics: - result['QuotaMetrics'].append(k.to_map() if k else None) + if self.cpu is not None: + result['Cpu'] = self.cpu + if self.default_driver is not None: + result['DefaultDriver'] = self.default_driver + if self.ecs_count is not None: + result['EcsCount'] = self.ecs_count + if self.ecs_spec is not None: + result['EcsSpec'] = self.ecs_spec + if self.gmt_created_time is not None: + result['GmtCreatedTime'] = self.gmt_created_time + if self.gmt_expired_time is not None: + result['GmtExpiredTime'] = self.gmt_expired_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + if self.gmt_started_time is not None: + result['GmtStartedTime'] = self.gmt_started_time + if self.gpu is not None: + result['Gpu'] = self.gpu + if self.gpu_type is not None: + result['GpuType'] = self.gpu_type + if self.machine_group_id is not None: + result['MachineGroupID'] = self.machine_group_id + if self.memory is not None: + result['Memory'] = self.memory + if self.name is not None: + result['Name'] = self.name + if self.payment_duration is not None: + result['PaymentDuration'] = self.payment_duration + if self.payment_duration_unit is not None: + result['PaymentDurationUnit'] = self.payment_duration_unit + if self.payment_type is not None: + result['PaymentType'] = self.payment_type if self.request_id is not None: result['RequestId'] = self.request_id + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id + if self.status is not None: + result['Status'] = self.status + if self.supported_drivers is not None: + result['SupportedDrivers'] = self.supported_drivers + result['Tags'] = [] + if self.tags is not None: + for k in self.tags: + result['Tags'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - self.quota_metrics = [] - if 
m.get('QuotaMetrics') is not None: - for k in m.get('QuotaMetrics'): - temp_model = QuotaMetric() - self.quota_metrics.append(temp_model.from_map(k)) + if m.get('Cpu') is not None: + self.cpu = m.get('Cpu') + if m.get('DefaultDriver') is not None: + self.default_driver = m.get('DefaultDriver') + if m.get('EcsCount') is not None: + self.ecs_count = m.get('EcsCount') + if m.get('EcsSpec') is not None: + self.ecs_spec = m.get('EcsSpec') + if m.get('GmtCreatedTime') is not None: + self.gmt_created_time = m.get('GmtCreatedTime') + if m.get('GmtExpiredTime') is not None: + self.gmt_expired_time = m.get('GmtExpiredTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + if m.get('GmtStartedTime') is not None: + self.gmt_started_time = m.get('GmtStartedTime') + if m.get('Gpu') is not None: + self.gpu = m.get('Gpu') + if m.get('GpuType') is not None: + self.gpu_type = m.get('GpuType') + if m.get('MachineGroupID') is not None: + self.machine_group_id = m.get('MachineGroupID') + if m.get('Memory') is not None: + self.memory = m.get('Memory') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('PaymentDuration') is not None: + self.payment_duration = m.get('PaymentDuration') + if m.get('PaymentDurationUnit') is not None: + self.payment_duration_unit = m.get('PaymentDurationUnit') + if m.get('PaymentType') is not None: + self.payment_type = m.get('PaymentType') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('SupportedDrivers') is not None: + self.supported_drivers = m.get('SupportedDrivers') + self.tags = [] + if m.get('Tags') is not None: + for k in m.get('Tags'): + temp_model = GetResourceGroupMachineGroupResponseBodyTags() + self.tags.append(temp_model.from_map(k)) return self -class 
GetQuotaMetricsResponse(TeaModel): +class GetResourceGroupMachineGroupResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaMetricsResponseBody = None, + body: GetResourceGroupMachineGroupResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11086,25 +11961,23 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaMetricsResponseBody() + temp_model = GetResourceGroupMachineGroupResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQuotaNodeMetricsRequest(TeaModel): +class GetResourceGroupMetricsRequest(TeaModel): def __init__( self, end_time: str = None, gputype: str = None, start_time: str = None, time_step: str = None, - verbose: bool = None, ): self.end_time = end_time self.gputype = gputype self.start_time = start_time self.time_step = time_step - self.verbose = verbose def validate(self): pass @@ -11123,8 +11996,6 @@ def to_map(self): result['StartTime'] = self.start_time if self.time_step is not None: result['TimeStep'] = self.time_step - if self.verbose is not None: - result['Verbose'] = self.verbose return result def from_map(self, m: dict = None): @@ -11137,27 +12008,23 @@ def from_map(self, m: dict = None): self.start_time = m.get('StartTime') if m.get('TimeStep') is not None: self.time_step = m.get('TimeStep') - if m.get('Verbose') is not None: - self.verbose = m.get('Verbose') return self -class GetQuotaNodeMetricsResponseBody(TeaModel): +class GetResourceGroupMetricsResponseBody(TeaModel): def __init__( self, - metric_type: str = None, - nodes_metrics: List[NodeMetric] = None, - quota_id: str = None, request_id: str = None, + resource_group_id: str = None, + resource_group_metrics: List[ResourceGroupMetric] = None, ): - self.metric_type = metric_type - self.nodes_metrics = nodes_metrics - self.quota_id = quota_id self.request_id = 
request_id + self.resource_group_id = resource_group_id + self.resource_group_metrics = resource_group_metrics def validate(self): - if self.nodes_metrics: - for k in self.nodes_metrics: + if self.resource_group_metrics: + for k in self.resource_group_metrics: if k: k.validate() @@ -11167,40 +12034,36 @@ def to_map(self): return _map result = dict() - if self.metric_type is not None: - result['MetricType'] = self.metric_type - result['NodesMetrics'] = [] - if self.nodes_metrics is not None: - for k in self.nodes_metrics: - result['NodesMetrics'].append(k.to_map() if k else None) - if self.quota_id is not None: - result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id + result['ResourceGroupMetrics'] = [] + if self.resource_group_metrics is not None: + for k in self.resource_group_metrics: + result['ResourceGroupMetrics'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('MetricType') is not None: - self.metric_type = m.get('MetricType') - self.nodes_metrics = [] - if m.get('NodesMetrics') is not None: - for k in m.get('NodesMetrics'): - temp_model = NodeMetric() - self.nodes_metrics.append(temp_model.from_map(k)) - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') + self.resource_group_metrics = [] + if m.get('ResourceGroupMetrics') is not None: + for k in m.get('ResourceGroupMetrics'): + temp_model = ResourceGroupMetric() + self.resource_group_metrics.append(temp_model.from_map(k)) return self -class GetQuotaNodeMetricsResponse(TeaModel): +class GetResourceGroupMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: 
GetQuotaNodeMetricsResponseBody = None, + body: GetResourceGroupMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11231,37 +12094,20 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaNodeMetricsResponseBody() + temp_model = GetResourceGroupMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQuotaNodeViewMetricsRequest(TeaModel): +class GetResourceGroupRequestRequest(TeaModel): def __init__( self, - node_id: str = None, - node_status: str = None, - order: str = None, - order_status: str = None, - page_number: int = None, - page_size: int = None, + pod_status: str = None, resource_group_id: str = None, - self_only: bool = None, - sort_by: str = None, - time_step: str = None, - workspace_id: str = None, ): - self.node_id = node_id - self.node_status = node_status - self.order = order - self.order_status = order_status - self.page_number = page_number - self.page_size = page_size + self.pod_status = pod_status + # This parameter is required. 
self.resource_group_id = resource_group_id - self.self_only = self_only - self.sort_by = sort_by - self.time_step = time_step - self.workspace_id = workspace_id def validate(self): pass @@ -11272,73 +12118,37 @@ def to_map(self): return _map result = dict() - if self.node_id is not None: - result['NodeId'] = self.node_id - if self.node_status is not None: - result['NodeStatus'] = self.node_status - if self.order is not None: - result['Order'] = self.order - if self.order_status is not None: - result['OrderStatus'] = self.order_status - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size + if self.pod_status is not None: + result['PodStatus'] = self.pod_status if self.resource_group_id is not None: - result['ResourceGroupId'] = self.resource_group_id - if self.self_only is not None: - result['SelfOnly'] = self.self_only - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.time_step is not None: - result['TimeStep'] = self.time_step - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + result['ResourceGroupID'] = self.resource_group_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('NodeId') is not None: - self.node_id = m.get('NodeId') - if m.get('NodeStatus') is not None: - self.node_status = m.get('NodeStatus') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('OrderStatus') is not None: - self.order_status = m.get('OrderStatus') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('ResourceGroupId') is not None: - self.resource_group_id = m.get('ResourceGroupId') - if m.get('SelfOnly') is not None: - self.self_only = m.get('SelfOnly') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('TimeStep') is not None: - self.time_step = 
m.get('TimeStep') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('PodStatus') is not None: + self.pod_status = m.get('PodStatus') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') return self -class GetQuotaNodeViewMetricsResponseBody(TeaModel): +class GetResourceGroupRequestResponseBody(TeaModel): def __init__( self, - node_metrics: List[QuotaNodeViewMetric] = None, - quota_id: str = None, - request_id: str = None, - total_count: int = None, + request_cpu: int = None, + request_gpu: int = None, + request_gpuinfos: List[GPUInfo] = None, + request_memory: int = None, ): - self.node_metrics = node_metrics - self.quota_id = quota_id - self.request_id = request_id - self.total_count = total_count + self.request_cpu = request_cpu + self.request_gpu = request_gpu + self.request_gpuinfos = request_gpuinfos + self.request_memory = request_memory def validate(self): - if self.node_metrics: - for k in self.node_metrics: + if self.request_gpuinfos: + for k in self.request_gpuinfos: if k: k.validate() @@ -11348,40 +12158,40 @@ def to_map(self): return _map result = dict() - result['NodeMetrics'] = [] - if self.node_metrics is not None: - for k in self.node_metrics: - result['NodeMetrics'].append(k.to_map() if k else None) - if self.quota_id is not None: - result['QuotaId'] = self.quota_id - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + if self.request_cpu is not None: + result['requestCPU'] = self.request_cpu + if self.request_gpu is not None: + result['requestGPU'] = self.request_gpu + result['requestGPUInfos'] = [] + if self.request_gpuinfos is not None: + for k in self.request_gpuinfos: + result['requestGPUInfos'].append(k.to_map() if k else None) + if self.request_memory is not None: + result['requestMemory'] = self.request_memory return result def from_map(self, m: dict = None): 
m = m or dict() - self.node_metrics = [] - if m.get('NodeMetrics') is not None: - for k in m.get('NodeMetrics'): - temp_model = QuotaNodeViewMetric() - self.node_metrics.append(temp_model.from_map(k)) - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + if m.get('requestCPU') is not None: + self.request_cpu = m.get('requestCPU') + if m.get('requestGPU') is not None: + self.request_gpu = m.get('requestGPU') + self.request_gpuinfos = [] + if m.get('requestGPUInfos') is not None: + for k in m.get('requestGPUInfos'): + temp_model = GPUInfo() + self.request_gpuinfos.append(temp_model.from_map(k)) + if m.get('requestMemory') is not None: + self.request_memory = m.get('requestMemory') return self -class GetQuotaNodeViewMetricsResponse(TeaModel): +class GetResourceGroupRequestResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaNodeViewMetricsResponseBody = None, + body: GetResourceGroupRequestResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11412,37 +12222,17 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaNodeViewMetricsResponseBody() + temp_model = GetResourceGroupRequestResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQuotaQueueInfoRequest(TeaModel): +class GetResourceGroupTotalRequest(TeaModel): def __init__( self, - before_workload_id: str = None, - order: str = None, - page_number: int = None, - page_size: int = None, - show_own: bool = None, - sort_by: str = None, - status: str = None, - sub_quota_ids: str = None, - workload_ids: str = None, - workload_type: str = None, - workspace_ids: str = None, + resource_group_id: str = None, ): - 
self.before_workload_id = before_workload_id - self.order = order - self.page_number = page_number - self.page_size = page_size - self.show_own = show_own - self.sort_by = sort_by - self.status = status - self.sub_quota_ids = sub_quota_ids - self.workload_ids = workload_ids - self.workload_type = workload_type - self.workspace_ids = workspace_ids + self.resource_group_id = resource_group_id def validate(self): pass @@ -11453,71 +12243,33 @@ def to_map(self): return _map result = dict() - if self.before_workload_id is not None: - result['BeforeWorkloadId'] = self.before_workload_id - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.show_own is not None: - result['ShowOwn'] = self.show_own - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.status is not None: - result['Status'] = self.status - if self.sub_quota_ids is not None: - result['SubQuotaIds'] = self.sub_quota_ids - if self.workload_ids is not None: - result['WorkloadIds'] = self.workload_ids - if self.workload_type is not None: - result['WorkloadType'] = self.workload_type - if self.workspace_ids is not None: - result['WorkspaceIds'] = self.workspace_ids + if self.resource_group_id is not None: + result['ResourceGroupID'] = self.resource_group_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('BeforeWorkloadId') is not None: - self.before_workload_id = m.get('BeforeWorkloadId') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('ShowOwn') is not None: - self.show_own = m.get('ShowOwn') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('Status') is not None: - self.status = m.get('Status') - 
if m.get('SubQuotaIds') is not None: - self.sub_quota_ids = m.get('SubQuotaIds') - if m.get('WorkloadIds') is not None: - self.workload_ids = m.get('WorkloadIds') - if m.get('WorkloadType') is not None: - self.workload_type = m.get('WorkloadType') - if m.get('WorkspaceIds') is not None: - self.workspace_ids = m.get('WorkspaceIds') + if m.get('ResourceGroupID') is not None: + self.resource_group_id = m.get('ResourceGroupID') return self -class GetQuotaQueueInfoResponseBody(TeaModel): +class GetResourceGroupTotalResponseBody(TeaModel): def __init__( self, - queue_infos: List[QueueInfo] = None, - request_id: str = None, - total_count: int = None, + total_cpu: int = None, + total_gpu: int = None, + total_gpuinfos: List[GPUInfo] = None, + total_memory: int = None, ): - self.queue_infos = queue_infos - self.request_id = request_id - self.total_count = total_count + self.total_cpu = total_cpu + self.total_gpu = total_gpu + self.total_gpuinfos = total_gpuinfos + self.total_memory = total_memory def validate(self): - if self.queue_infos: - for k in self.queue_infos: + if self.total_gpuinfos: + for k in self.total_gpuinfos: if k: k.validate() @@ -11527,36 +12279,40 @@ def to_map(self): return _map result = dict() - result['QueueInfos'] = [] - if self.queue_infos is not None: - for k in self.queue_infos: - result['QueueInfos'].append(k.to_map() if k else None) - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + if self.total_cpu is not None: + result['totalCPU'] = self.total_cpu + if self.total_gpu is not None: + result['totalGPU'] = self.total_gpu + result['totalGPUInfos'] = [] + if self.total_gpuinfos is not None: + for k in self.total_gpuinfos: + result['totalGPUInfos'].append(k.to_map() if k else None) + if self.total_memory is not None: + result['totalMemory'] = self.total_memory return result def from_map(self, m: dict = None): m = m or dict() - self.queue_infos = [] - 
if m.get('QueueInfos') is not None: - for k in m.get('QueueInfos'): - temp_model = QueueInfo() - self.queue_infos.append(temp_model.from_map(k)) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + if m.get('totalCPU') is not None: + self.total_cpu = m.get('totalCPU') + if m.get('totalGPU') is not None: + self.total_gpu = m.get('totalGPU') + self.total_gpuinfos = [] + if m.get('totalGPUInfos') is not None: + for k in m.get('totalGPUInfos'): + temp_model = GPUInfo() + self.total_gpuinfos.append(temp_model.from_map(k)) + if m.get('totalMemory') is not None: + self.total_memory = m.get('totalMemory') return self -class GetQuotaQueueInfoResponse(TeaModel): +class GetResourceGroupTotalResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaQueueInfoResponseBody = None, + body: GetResourceGroupTotalResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11587,102 +12343,24 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaQueueInfoResponseBody() + temp_model = GetResourceGroupTotalResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetQuotaRangeUserViewMetricsRequest(TeaModel): - def __init__( - self, - end_time: str = None, - order: str = None, - page_number: int = None, - page_size: int = None, - sort_by: str = None, - start_time: str = None, - user_id: str = None, - workspace_id: str = None, - ): - self.end_time = end_time - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.start_time = start_time - self.user_id = user_id - self.workspace_id = workspace_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = 
dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class GetQuotaRangeUserViewMetricsResponseBody(TeaModel): +class GetServiceIdentityRoleResponseBody(TeaModel): def __init__( self, - quota_id: str = None, + exist: bool = None, request_id: str = None, - summary: QuotaUserViewMetric = None, - total_count: int = None, - user_metrics: List[QuotaUserViewMetric] = None, + role_name: str = None, ): - self.quota_id = quota_id + self.exist = exist self.request_id = request_id - self.summary = summary - self.total_count = total_count - self.user_metrics = user_metrics + self.role_name = role_name def validate(self): - if self.summary: - self.summary.validate() - if self.user_metrics: - for k in self.user_metrics: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -11690,45 +12368,31 @@ 
def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id + if self.exist is not None: + result['Exist'] = self.exist if self.request_id is not None: result['RequestId'] = self.request_id - if self.summary is not None: - result['Summary'] = self.summary.to_map() - if self.total_count is not None: - result['TotalCount'] = self.total_count - result['UserMetrics'] = [] - if self.user_metrics is not None: - for k in self.user_metrics: - result['UserMetrics'].append(k.to_map() if k else None) + if self.role_name is not None: + result['RoleName'] = self.role_name return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') + if m.get('Exist') is not None: + self.exist = m.get('Exist') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Summary') is not None: - temp_model = QuotaUserViewMetric() - self.summary = temp_model.from_map(m['Summary']) - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') - self.user_metrics = [] - if m.get('UserMetrics') is not None: - for k in m.get('UserMetrics'): - temp_model = QuotaUserViewMetric() - self.user_metrics.append(temp_model.from_map(k)) + if m.get('RoleName') is not None: + self.role_name = m.get('RoleName') return self -class GetQuotaRangeUserViewMetricsResponse(TeaModel): +class GetServiceIdentityRoleResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaRangeUserViewMetricsResponseBody = None, + body: GetServiceIdentityRoleResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11759,29 +12423,27 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaRangeUserViewMetricsResponseBody() + temp_model = GetServiceIdentityRoleResponseBody() self.body = 
temp_model.from_map(m['body']) return self -class GetQuotaUserViewMetricsRequest(TeaModel): +class GetSpotPriceHistoryRequest(TeaModel): def __init__( self, + end_time: str = None, order: str = None, - page_number: str = None, - page_size: str = None, + page_number: int = None, + page_size: int = None, sort_by: str = None, - time_step: str = None, - user_id: str = None, - workspace_id: str = None, + start_time: str = None, ): + self.end_time = end_time self.order = order self.page_number = page_number self.page_size = page_size self.sort_by = sort_by - self.time_step = time_step - self.user_id = user_id - self.workspace_id = workspace_id + self.start_time = start_time def validate(self): pass @@ -11792,6 +12454,8 @@ def to_map(self): return _map result = dict() + if self.end_time is not None: + result['EndTime'] = self.end_time if self.order is not None: result['Order'] = self.order if self.page_number is not None: @@ -11800,16 +12464,14 @@ def to_map(self): result['PageSize'] = self.page_size if self.sort_by is not None: result['SortBy'] = self.sort_by - if self.time_step is not None: - result['TimeStep'] = self.time_step - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.start_time is not None: + result['StartTime'] = self.start_time return result def from_map(self, m: dict = None): m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') if m.get('Order') is not None: self.order = m.get('Order') if m.get('PageNumber') is not None: @@ -11818,35 +12480,25 @@ def from_map(self, m: dict = None): self.page_size = m.get('PageSize') if m.get('SortBy') is not None: self.sort_by = m.get('SortBy') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('StartTime') is 
not None: + self.start_time = m.get('StartTime') return self -class GetQuotaUserViewMetricsResponseBody(TeaModel): +class GetSpotPriceHistoryResponseBody(TeaModel): def __init__( self, - quota_id: str = None, request_id: str = None, - summary: QuotaUserViewMetric = None, + spot_price_history: List[SpotPriceItem] = None, total_count: int = None, - user_metrics: List[QuotaUserViewMetric] = None, ): - self.quota_id = quota_id self.request_id = request_id - self.summary = summary + self.spot_price_history = spot_price_history self.total_count = total_count - self.user_metrics = user_metrics def validate(self): - if self.summary: - self.summary.validate() - if self.user_metrics: - for k in self.user_metrics: + if self.spot_price_history: + for k in self.spot_price_history: if k: k.validate() @@ -11856,45 +12508,36 @@ def to_map(self): return _map result = dict() - if self.quota_id is not None: - result['QuotaId'] = self.quota_id if self.request_id is not None: result['RequestId'] = self.request_id - if self.summary is not None: - result['Summary'] = self.summary.to_map() + result['SpotPriceHistory'] = [] + if self.spot_price_history is not None: + for k in self.spot_price_history: + result['SpotPriceHistory'].append(k.to_map() if k else None) if self.total_count is not None: result['TotalCount'] = self.total_count - result['UserMetrics'] = [] - if self.user_metrics is not None: - for k in self.user_metrics: - result['UserMetrics'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('QuotaId') is not None: - self.quota_id = m.get('QuotaId') if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('Summary') is not None: - temp_model = QuotaUserViewMetric() - self.summary = temp_model.from_map(m['Summary']) + self.spot_price_history = [] + if m.get('SpotPriceHistory') is not None: + for k in m.get('SpotPriceHistory'): + temp_model = SpotPriceItem() + 
self.spot_price_history.append(temp_model.from_map(k)) if m.get('TotalCount') is not None: self.total_count = m.get('TotalCount') - self.user_metrics = [] - if m.get('UserMetrics') is not None: - for k in m.get('UserMetrics'): - temp_model = QuotaUserViewMetric() - self.user_metrics.append(temp_model.from_map(k)) return self -class GetQuotaUserViewMetricsResponse(TeaModel): +class GetSpotPriceHistoryResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetQuotaUserViewMetricsResponseBody = None, + body: GetSpotPriceHistoryResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -11925,31 +12568,21 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetQuotaUserViewMetricsResponseBody() + temp_model = GetSpotPriceHistoryResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetRangeUserViewMetricsRequest(TeaModel): +class GetSpotStockPreviewResponseBody(TeaModel): def __init__( self, - end_time: str = None, - order: str = None, - page_number: int = None, - page_size: int = None, - sort_by: str = None, - start_time: str = None, - user_id: str = None, - workspace_id: str = None, + instance_type: str = None, + request_id: str = None, + stock_status: str = None, ): - self.end_time = end_time - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.start_time = start_time - self.user_id = user_id - self.workspace_id = workspace_id + self.instance_type = instance_type + self.request_id = request_id + self.stock_status = stock_status def validate(self): pass @@ -11960,101 +12593,31 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = 
self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class GetRangeUserViewMetricsResponseBody(TeaModel): - def __init__( - self, - summary: UserViewMetric = None, - user_metrics: List[UserViewMetric] = None, - request_id: str = None, - ): - self.summary = summary - self.user_metrics = user_metrics - self.request_id = request_id - - def validate(self): - if self.summary: - self.summary.validate() - if self.user_metrics: - for k in self.user_metrics: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.summary is not None: - result['Summary'] = self.summary.to_map() - result['UserMetrics'] = [] - if self.user_metrics is not None: - for k in self.user_metrics: - result['UserMetrics'].append(k.to_map() if k else None) + if self.instance_type is not None: + result['InstanceType'] = self.instance_type if self.request_id is not None: - result['requestId'] = self.request_id + result['RequestId'] = self.request_id + if 
self.stock_status is not None: + result['StockStatus'] = self.stock_status return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Summary') is not None: - temp_model = UserViewMetric() - self.summary = temp_model.from_map(m['Summary']) - self.user_metrics = [] - if m.get('UserMetrics') is not None: - for k in m.get('UserMetrics'): - temp_model = UserViewMetric() - self.user_metrics.append(temp_model.from_map(k)) - if m.get('requestId') is not None: - self.request_id = m.get('requestId') + if m.get('InstanceType') is not None: + self.instance_type = m.get('InstanceType') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('StockStatus') is not None: + self.stock_status = m.get('StockStatus') return self -class GetRangeUserViewMetricsResponse(TeaModel): +class GetSpotStockPreviewResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetRangeUserViewMetricsResponseBody = None, + body: GetSpotStockPreviewResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -12085,19 +12648,20 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetRangeUserViewMetricsResponseBody() + temp_model = GetSpotStockPreviewResponseBody() self.body = temp_model.from_map(m['body']) return self -class GetResourceGroupRequestTag(TeaModel): +class GetTokenRequest(TeaModel): def __init__( self, - key: str = None, - value: str = None, + expire_time: int = None, + training_job_id: str = None, ): - self.key = key - self.value = value + self.expire_time = expire_time + # This parameter is required. 
+ self.training_job_id = training_job_id def validate(self): pass @@ -12108,35 +12672,32 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value + if self.expire_time is not None: + result['ExpireTime'] = self.expire_time + if self.training_job_id is not None: + result['TrainingJobId'] = self.training_job_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('ExpireTime') is not None: + self.expire_time = m.get('ExpireTime') + if m.get('TrainingJobId') is not None: + self.training_job_id = m.get('TrainingJobId') return self -class GetResourceGroupRequest(TeaModel): +class GetTokenResponseBody(TeaModel): def __init__( self, - is_aiworkspace_data_enabled: bool = None, - tag: List[GetResourceGroupRequestTag] = None, + request_id: str = None, + token: str = None, ): - self.is_aiworkspace_data_enabled = is_aiworkspace_data_enabled - self.tag = tag + self.request_id = request_id + self.token = token def validate(self): - if self.tag: - for k in self.tag: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -12144,37 +12705,35 @@ def to_map(self): return _map result = dict() - if self.is_aiworkspace_data_enabled is not None: - result['IsAIWorkspaceDataEnabled'] = self.is_aiworkspace_data_enabled - result['Tag'] = [] - if self.tag is not None: - for k in self.tag: - result['Tag'].append(k.to_map() if k else None) + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.token is not None: + result['Token'] = self.token return result def from_map(self, m: dict = None): m = m or dict() - if m.get('IsAIWorkspaceDataEnabled') is not None: - self.is_aiworkspace_data_enabled = m.get('IsAIWorkspaceDataEnabled') - self.tag = [] - if m.get('Tag') is not None: - for k in m.get('Tag'): - 
temp_model = GetResourceGroupRequestTag() - self.tag.append(temp_model.from_map(k)) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('Token') is not None: + self.token = m.get('Token') return self -class GetResourceGroupShrinkRequest(TeaModel): +class GetTokenResponse(TeaModel): def __init__( self, - is_aiworkspace_data_enabled: bool = None, - tag_shrink: str = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: GetTokenResponseBody = None, ): - self.is_aiworkspace_data_enabled = is_aiworkspace_data_enabled - self.tag_shrink = tag_shrink + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - pass + if self.body: + self.body.validate() def to_map(self): _map = super().to_map() @@ -12182,29 +12741,32 @@ def to_map(self): return _map result = dict() - if self.is_aiworkspace_data_enabled is not None: - result['IsAIWorkspaceDataEnabled'] = self.is_aiworkspace_data_enabled - if self.tag_shrink is not None: - result['Tag'] = self.tag_shrink + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() return result def from_map(self, m: dict = None): m = m or dict() - if m.get('IsAIWorkspaceDataEnabled') is not None: - self.is_aiworkspace_data_enabled = m.get('IsAIWorkspaceDataEnabled') - if m.get('Tag') is not None: - self.tag_shrink = m.get('Tag') + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = GetTokenResponseBody() + self.body = temp_model.from_map(m['body']) return self -class GetResourceGroupResponseBodyTags(TeaModel): +class GetTrainingJobRequest(TeaModel): def __init__( self, - tag_key: str = None, - tag_value: str = None, + token: str = None, ): - self.tag_key = tag_key - 
self.tag_value = tag_value + self.token = token def validate(self): pass @@ -12215,61 +12777,34 @@ def to_map(self): return _map result = dict() - if self.tag_key is not None: - result['TagKey'] = self.tag_key - if self.tag_value is not None: - result['TagValue'] = self.tag_value + if self.token is not None: + result['Token'] = self.token return result def from_map(self, m: dict = None): m = m or dict() - if m.get('TagKey') is not None: - self.tag_key = m.get('TagKey') - if m.get('TagValue') is not None: - self.tag_value = m.get('TagValue') + if m.get('Token') is not None: + self.token = m.get('Token') return self -class GetResourceGroupResponseBody(TeaModel): +class GetTrainingJobResponseBodyComputeResourceInstanceSpec(TeaModel): def __init__( self, - cluster_id: str = None, - computing_resource_provider: str = None, - creator_id: str = None, - description: str = None, - gmt_created_time: str = None, - gmt_modified_time: str = None, - name: str = None, - request_id: str = None, - resource_type: str = None, - status: str = None, - support_rdma: bool = None, - tags: List[GetResourceGroupResponseBodyTags] = None, - user_vpc: UserVpc = None, - workspace_id: str = None, + cpu: str = None, + gpu: str = None, + gputype: str = None, + memory: str = None, + shared_memory: str = None, ): - self.cluster_id = cluster_id - self.computing_resource_provider = computing_resource_provider - self.creator_id = creator_id - self.description = description - self.gmt_created_time = gmt_created_time - self.gmt_modified_time = gmt_modified_time - self.name = name - self.request_id = request_id - self.resource_type = resource_type - self.status = status - self.support_rdma = support_rdma - self.tags = tags - self.user_vpc = user_vpc - self.workspace_id = workspace_id + self.cpu = cpu + self.gpu = gpu + self.gputype = gputype + self.memory = memory + self.shared_memory = shared_memory def validate(self): - if self.tags: - for k in self.tags: - if k: - k.validate() - if self.user_vpc: - 
self.user_vpc.validate() + pass def to_map(self): _map = super().to_map() @@ -12277,89 +12812,44 @@ def to_map(self): return _map result = dict() - if self.cluster_id is not None: - result['ClusterID'] = self.cluster_id - if self.computing_resource_provider is not None: - result['ComputingResourceProvider'] = self.computing_resource_provider - if self.creator_id is not None: - result['CreatorID'] = self.creator_id - if self.description is not None: - result['Description'] = self.description - if self.gmt_created_time is not None: - result['GmtCreatedTime'] = self.gmt_created_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.name is not None: - result['Name'] = self.name - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.resource_type is not None: - result['ResourceType'] = self.resource_type - if self.status is not None: - result['Status'] = self.status - if self.support_rdma is not None: - result['SupportRDMA'] = self.support_rdma - result['Tags'] = [] - if self.tags is not None: - for k in self.tags: - result['Tags'].append(k.to_map() if k else None) - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() - if self.workspace_id is not None: - result['WorkspaceID'] = self.workspace_id + if self.cpu is not None: + result['CPU'] = self.cpu + if self.gpu is not None: + result['GPU'] = self.gpu + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.memory is not None: + result['Memory'] = self.memory + if self.shared_memory is not None: + result['SharedMemory'] = self.shared_memory return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ClusterID') is not None: - self.cluster_id = m.get('ClusterID') - if m.get('ComputingResourceProvider') is not None: - self.computing_resource_provider = m.get('ComputingResourceProvider') - if m.get('CreatorID') is not None: - self.creator_id = m.get('CreatorID') - if 
m.get('Description') is not None: - self.description = m.get('Description') - if m.get('GmtCreatedTime') is not None: - self.gmt_created_time = m.get('GmtCreatedTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('ResourceType') is not None: - self.resource_type = m.get('ResourceType') - if m.get('Status') is not None: - self.status = m.get('Status') - if m.get('SupportRDMA') is not None: - self.support_rdma = m.get('SupportRDMA') - self.tags = [] - if m.get('Tags') is not None: - for k in m.get('Tags'): - temp_model = GetResourceGroupResponseBodyTags() - self.tags.append(temp_model.from_map(k)) - if m.get('UserVpc') is not None: - temp_model = UserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) - if m.get('WorkspaceID') is not None: - self.workspace_id = m.get('WorkspaceID') + if m.get('CPU') is not None: + self.cpu = m.get('CPU') + if m.get('GPU') is not None: + self.gpu = m.get('GPU') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('Memory') is not None: + self.memory = m.get('Memory') + if m.get('SharedMemory') is not None: + self.shared_memory = m.get('SharedMemory') return self -class GetResourceGroupResponse(TeaModel): +class GetTrainingJobResponseBodyComputeResourceSpotSpec(TeaModel): def __init__( self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetResourceGroupResponseBody = None, + spot_discount_limit: float = None, + spot_strategy: str = None, ): - self.headers = headers - self.status_code = status_code - self.body = body + self.spot_discount_limit = spot_discount_limit + self.spot_strategy = spot_strategy def validate(self): - if self.body: - self.body.validate() + pass def to_map(self): _map = super().to_map() @@ -12367,37 +12857,45 @@ def to_map(self): return _map result = dict() - if 
self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() + if self.spot_discount_limit is not None: + result['SpotDiscountLimit'] = self.spot_discount_limit + if self.spot_strategy is not None: + result['SpotStrategy'] = self.spot_strategy return result def from_map(self, m: dict = None): m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetResourceGroupResponseBody() - self.body = temp_model.from_map(m['body']) + if m.get('SpotDiscountLimit') is not None: + self.spot_discount_limit = m.get('SpotDiscountLimit') + if m.get('SpotStrategy') is not None: + self.spot_strategy = m.get('SpotStrategy') return self -class GetResourceGroupMachineGroupRequestTag(TeaModel): +class GetTrainingJobResponseBodyComputeResource(TeaModel): def __init__( self, - key: str = None, - value: str = None, + ecs_count: int = None, + ecs_spec: str = None, + instance_count: int = None, + instance_spec: GetTrainingJobResponseBodyComputeResourceInstanceSpec = None, + resource_id: str = None, + spot_spec: GetTrainingJobResponseBodyComputeResourceSpotSpec = None, + use_spot_instance: bool = None, ): - self.key = key - self.value = value + self.ecs_count = ecs_count + self.ecs_spec = ecs_spec + self.instance_count = instance_count + self.instance_spec = instance_spec + self.resource_id = resource_id + self.spot_spec = spot_spec + self.use_spot_instance = use_spot_instance def validate(self): - pass + if self.instance_spec: + self.instance_spec.validate() + if self.spot_spec: + self.spot_spec.validate() def to_map(self): _map = super().to_map() @@ -12405,33 +12903,54 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: 
- result['Value'] = self.value + if self.ecs_count is not None: + result['EcsCount'] = self.ecs_count + if self.ecs_spec is not None: + result['EcsSpec'] = self.ecs_spec + if self.instance_count is not None: + result['InstanceCount'] = self.instance_count + if self.instance_spec is not None: + result['InstanceSpec'] = self.instance_spec.to_map() + if self.resource_id is not None: + result['ResourceId'] = self.resource_id + if self.spot_spec is not None: + result['SpotSpec'] = self.spot_spec.to_map() + if self.use_spot_instance is not None: + result['UseSpotInstance'] = self.use_spot_instance return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('EcsCount') is not None: + self.ecs_count = m.get('EcsCount') + if m.get('EcsSpec') is not None: + self.ecs_spec = m.get('EcsSpec') + if m.get('InstanceCount') is not None: + self.instance_count = m.get('InstanceCount') + if m.get('InstanceSpec') is not None: + temp_model = GetTrainingJobResponseBodyComputeResourceInstanceSpec() + self.instance_spec = temp_model.from_map(m['InstanceSpec']) + if m.get('ResourceId') is not None: + self.resource_id = m.get('ResourceId') + if m.get('SpotSpec') is not None: + temp_model = GetTrainingJobResponseBodyComputeResourceSpotSpec() + self.spot_spec = temp_model.from_map(m['SpotSpec']) + if m.get('UseSpotInstance') is not None: + self.use_spot_instance = m.get('UseSpotInstance') return self -class GetResourceGroupMachineGroupRequest(TeaModel): +class GetTrainingJobResponseBodyExperimentConfig(TeaModel): def __init__( self, - tag: List[GetResourceGroupMachineGroupRequestTag] = None, + experiment_id: str = None, + experiment_name: str = None, ): - self.tag = tag + self.experiment_id = experiment_id + self.experiment_name = experiment_name def validate(self): - if self.tag: - for k in self.tag: - if k: - k.validate() + pass def to_map(self): _map = 
super().to_map() @@ -12439,28 +12958,29 @@ def to_map(self): return _map result = dict() - result['Tag'] = [] - if self.tag is not None: - for k in self.tag: - result['Tag'].append(k.to_map() if k else None) + if self.experiment_id is not None: + result['ExperimentId'] = self.experiment_id + if self.experiment_name is not None: + result['ExperimentName'] = self.experiment_name return result def from_map(self, m: dict = None): m = m or dict() - self.tag = [] - if m.get('Tag') is not None: - for k in m.get('Tag'): - temp_model = GetResourceGroupMachineGroupRequestTag() - self.tag.append(temp_model.from_map(k)) + if m.get('ExperimentId') is not None: + self.experiment_id = m.get('ExperimentId') + if m.get('ExperimentName') is not None: + self.experiment_name = m.get('ExperimentName') return self -class GetResourceGroupMachineGroupShrinkRequest(TeaModel): +class GetTrainingJobResponseBodyHyperParameters(TeaModel): def __init__( self, - tag_shrink: str = None, + name: str = None, + value: str = None, ): - self.tag_shrink = tag_shrink + self.name = name + self.value = value def validate(self): pass @@ -12471,25 +12991,31 @@ def to_map(self): return _map result = dict() - if self.tag_shrink is not None: - result['Tag'] = self.tag_shrink + if self.name is not None: + result['Name'] = self.name + if self.value is not None: + result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Tag') is not None: - self.tag_shrink = m.get('Tag') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class GetResourceGroupMachineGroupResponseBodyTags(TeaModel): +class GetTrainingJobResponseBodyInputChannels(TeaModel): def __init__( self, - tag_key: str = None, - tag_value: str = None, + dataset_id: str = None, + input_uri: str = None, + name: str = None, ): - self.tag_key = tag_key - self.tag_value = tag_value + self.dataset_id = dataset_id + self.input_uri = 
input_uri + self.name = name def validate(self): pass @@ -12500,73 +13026,38 @@ def to_map(self): return _map result = dict() - if self.tag_key is not None: - result['TagKey'] = self.tag_key - if self.tag_value is not None: - result['TagValue'] = self.tag_value + if self.dataset_id is not None: + result['DatasetId'] = self.dataset_id + if self.input_uri is not None: + result['InputUri'] = self.input_uri + if self.name is not None: + result['Name'] = self.name return result def from_map(self, m: dict = None): m = m or dict() - if m.get('TagKey') is not None: - self.tag_key = m.get('TagKey') - if m.get('TagValue') is not None: - self.tag_value = m.get('TagValue') + if m.get('DatasetId') is not None: + self.dataset_id = m.get('DatasetId') + if m.get('InputUri') is not None: + self.input_uri = m.get('InputUri') + if m.get('Name') is not None: + self.name = m.get('Name') return self -class GetResourceGroupMachineGroupResponseBody(TeaModel): +class GetTrainingJobResponseBodyInstances(TeaModel): def __init__( self, - cpu: str = None, - default_driver: str = None, - ecs_count: int = None, - ecs_spec: str = None, - gmt_created_time: str = None, - gmt_expired_time: str = None, - gmt_modified_time: str = None, - gmt_started_time: str = None, - gpu: str = None, - gpu_type: str = None, - machine_group_id: str = None, - memory: str = None, name: str = None, - payment_duration: str = None, - payment_duration_unit: str = None, - payment_type: str = None, - request_id: str = None, - resource_group_id: str = None, + role: str = None, status: str = None, - supported_drivers: List[str] = None, - tags: List[GetResourceGroupMachineGroupResponseBodyTags] = None, ): - self.cpu = cpu - self.default_driver = default_driver - self.ecs_count = ecs_count - self.ecs_spec = ecs_spec - self.gmt_created_time = gmt_created_time - self.gmt_expired_time = gmt_expired_time - self.gmt_modified_time = gmt_modified_time - self.gmt_started_time = gmt_started_time - self.gpu = gpu - self.gpu_type = 
gpu_type - self.machine_group_id = machine_group_id - self.memory = memory self.name = name - self.payment_duration = payment_duration - self.payment_duration_unit = payment_duration_unit - self.payment_type = payment_type - self.request_id = request_id - self.resource_group_id = resource_group_id + self.role = role self.status = status - self.supported_drivers = supported_drivers - self.tags = tags def validate(self): - if self.tags: - for k in self.tags: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -12574,116 +13065,36 @@ def to_map(self): return _map result = dict() - if self.cpu is not None: - result['Cpu'] = self.cpu - if self.default_driver is not None: - result['DefaultDriver'] = self.default_driver - if self.ecs_count is not None: - result['EcsCount'] = self.ecs_count - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.gmt_created_time is not None: - result['GmtCreatedTime'] = self.gmt_created_time - if self.gmt_expired_time is not None: - result['GmtExpiredTime'] = self.gmt_expired_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.gmt_started_time is not None: - result['GmtStartedTime'] = self.gmt_started_time - if self.gpu is not None: - result['Gpu'] = self.gpu - if self.gpu_type is not None: - result['GpuType'] = self.gpu_type - if self.machine_group_id is not None: - result['MachineGroupID'] = self.machine_group_id - if self.memory is not None: - result['Memory'] = self.memory if self.name is not None: result['Name'] = self.name - if self.payment_duration is not None: - result['PaymentDuration'] = self.payment_duration - if self.payment_duration_unit is not None: - result['PaymentDurationUnit'] = self.payment_duration_unit - if self.payment_type is not None: - result['PaymentType'] = self.payment_type - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.resource_group_id is not None: - 
result['ResourceGroupID'] = self.resource_group_id + if self.role is not None: + result['Role'] = self.role if self.status is not None: result['Status'] = self.status - if self.supported_drivers is not None: - result['SupportedDrivers'] = self.supported_drivers - result['Tags'] = [] - if self.tags is not None: - for k in self.tags: - result['Tags'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Cpu') is not None: - self.cpu = m.get('Cpu') - if m.get('DefaultDriver') is not None: - self.default_driver = m.get('DefaultDriver') - if m.get('EcsCount') is not None: - self.ecs_count = m.get('EcsCount') - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('GmtCreatedTime') is not None: - self.gmt_created_time = m.get('GmtCreatedTime') - if m.get('GmtExpiredTime') is not None: - self.gmt_expired_time = m.get('GmtExpiredTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('GmtStartedTime') is not None: - self.gmt_started_time = m.get('GmtStartedTime') - if m.get('Gpu') is not None: - self.gpu = m.get('Gpu') - if m.get('GpuType') is not None: - self.gpu_type = m.get('GpuType') - if m.get('MachineGroupID') is not None: - self.machine_group_id = m.get('MachineGroupID') - if m.get('Memory') is not None: - self.memory = m.get('Memory') if m.get('Name') is not None: self.name = m.get('Name') - if m.get('PaymentDuration') is not None: - self.payment_duration = m.get('PaymentDuration') - if m.get('PaymentDurationUnit') is not None: - self.payment_duration_unit = m.get('PaymentDurationUnit') - if m.get('PaymentType') is not None: - self.payment_type = m.get('PaymentType') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('ResourceGroupID') is not None: - self.resource_group_id = m.get('ResourceGroupID') + if m.get('Role') is not None: + self.role = m.get('Role') if m.get('Status') is not None: 
class GetTrainingJobResponseBodyLabels(TeaModel):
    """A single Key/Value label attached to a training job."""

    def __init__(
        self,
        key: str = None,
        value: str = None,
    ):
        # Label key, e.g. "BaseJobId".
        self.key = key
        # Label value associated with the key.
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.key is not None:
            result['Key'] = self.key
        if self.value is not None:
            result['Value'] = self.value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Key') is not None:
            self.key = m.get('Key')
        if m.get('Value') is not None:
            self.value = m.get('Value')
        return self


class GetTrainingJobResponseBodyLatestMetrics(TeaModel):
    """The most recent value reported for one training metric."""

    def __init__(
        self,
        name: str = None,
        timestamp: str = None,
        value: float = None,
    ):
        # Metric name, e.g. "loss" or "accuracy".
        self.name = name
        # Time the metric value was reported.
        self.timestamp = timestamp
        # Latest numeric value of the metric.
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.name is not None:
            result['Name'] = self.name
        if self.timestamp is not None:
            result['Timestamp'] = self.timestamp
        if self.value is not None:
            result['Value'] = self.value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Name') is not None:
            self.name = m.get('Name')
        if m.get('Timestamp') is not None:
            self.timestamp = m.get('Timestamp')
        if m.get('Value') is not None:
            self.value = m.get('Value')
        return self


class GetTrainingJobResponseBodyLatestProgressOverallProgress(TeaModel):
    """Overall completion ratio of the training job at a point in time."""

    def __init__(
        self,
        timestamp: str = None,
        value: float = None,
    ):
        # Time the progress value was sampled.
        self.timestamp = timestamp
        # Progress value (fractional, hence float).
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.timestamp is not None:
            result['Timestamp'] = self.timestamp
        if self.value is not None:
            result['Value'] = self.value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Timestamp') is not None:
            self.timestamp = m.get('Timestamp')
        if m.get('Value') is not None:
            self.value = m.get('Value')
        return self


class GetTrainingJobResponseBodyLatestProgressRemainingTime(TeaModel):
    """Estimated remaining time of the training job at a point in time."""

    def __init__(
        self,
        timestamp: str = None,
        value: int = None,
    ):
        # Time the estimate was sampled.
        self.timestamp = timestamp
        # Remaining time estimate (integer; presumably seconds — TODO confirm
        # against the service API reference).
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.timestamp is not None:
            result['Timestamp'] = self.timestamp
        if self.value is not None:
            result['Value'] = self.value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Timestamp') is not None:
            self.timestamp = m.get('Timestamp')
        if m.get('Value') is not None:
            self.value = m.get('Value')
        return self


class GetTrainingJobResponseBodyLatestProgress(TeaModel):
    """Latest progress snapshot: overall completion plus remaining-time estimate."""

    def __init__(
        self,
        overall_progress: GetTrainingJobResponseBodyLatestProgressOverallProgress = None,
        remaining_time: GetTrainingJobResponseBodyLatestProgressRemainingTime = None,
    ):
        self.overall_progress = overall_progress
        self.remaining_time = remaining_time

    def validate(self):
        # Recurse into nested models so a deep validate() covers the tree.
        if self.overall_progress:
            self.overall_progress.validate()
        if self.remaining_time:
            self.remaining_time.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.overall_progress is not None:
            result['OverallProgress'] = self.overall_progress.to_map()
        if self.remaining_time is not None:
            result['RemainingTime'] = self.remaining_time.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('OverallProgress') is not None:
            temp_model = GetTrainingJobResponseBodyLatestProgressOverallProgress()
            self.overall_progress = temp_model.from_map(m['OverallProgress'])
        if m.get('RemainingTime') is not None:
            temp_model = GetTrainingJobResponseBodyLatestProgressRemainingTime()
            self.remaining_time = temp_model.from_map(m['RemainingTime'])
        return self


class GetTrainingJobResponseBodyOutputChannels(TeaModel):
    """One output channel of the training job (where an output artifact lands)."""

    def __init__(
        self,
        dataset_id: str = None,
        name: str = None,
        output_uri: str = None,
    ):
        # Dataset backing the channel, if the output is a registered dataset.
        self.dataset_id = dataset_id
        # Channel name, e.g. "model" or "checkpoints".
        self.name = name
        # Storage URI the channel writes to.
        self.output_uri = output_uri

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.dataset_id is not None:
            result['DatasetId'] = self.dataset_id
        if self.name is not None:
            result['Name'] = self.name
        if self.output_uri is not None:
            result['OutputUri'] = self.output_uri
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('DatasetId') is not None:
            self.dataset_id = m.get('DatasetId')
        if m.get('Name') is not None:
            self.name = m.get('Name')
        if m.get('OutputUri') is not None:
            self.output_uri = m.get('OutputUri')
        return self


class GetTrainingJobResponseBodyOutputModel(TeaModel):
    """Location of the model produced by the training job."""

    def __init__(
        self,
        output_channel_name: str = None,
        uri: str = None,
    ):
        # Name of the output channel that holds the model artifact.
        self.output_channel_name = output_channel_name
        # Storage URI of the produced model.
        self.uri = uri

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.output_channel_name is not None:
            result['OutputChannelName'] = self.output_channel_name
        if self.uri is not None:
            result['Uri'] = self.uri
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('OutputChannelName') is not None:
            self.output_channel_name = m.get('OutputChannelName')
        if m.get('Uri') is not None:
            self.uri = m.get('Uri')
        return self


class GetTrainingJobResponseBodyScheduler(TeaModel):
    """Scheduler constraints applied to the training job."""

    def __init__(
        self,
        max_running_time_in_seconds: int = None,
    ):
        # Hard cap on job runtime, in seconds.
        self.max_running_time_in_seconds = max_running_time_in_seconds

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.max_running_time_in_seconds is not None:
            result['MaxRunningTimeInSeconds'] = self.max_running_time_in_seconds
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('MaxRunningTimeInSeconds') is not None:
            self.max_running_time_in_seconds = m.get('MaxRunningTimeInSeconds')
        return self


class GetTrainingJobResponseBodySettings(TeaModel):
    """Job-level settings (AIMaster configuration and scheduling priority)."""

    def __init__(
        self,
        aimaster_type: str = None,
        enable_error_monitoring_in_aimaster: bool = None,
        error_monitoring_args: str = None,
        priority: int = None,
    ):
        # Type of the AIMaster controller attached to the job.
        self.aimaster_type = aimaster_type
        # Whether AIMaster error monitoring is enabled for the job.
        self.enable_error_monitoring_in_aimaster = enable_error_monitoring_in_aimaster
        # Extra arguments passed to the error-monitoring component.
        self.error_monitoring_args = error_monitoring_args
        # Scheduling priority of the job.
        self.priority = priority

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.aimaster_type is not None:
            result['AIMasterType'] = self.aimaster_type
        if self.enable_error_monitoring_in_aimaster is not None:
            result['EnableErrorMonitoringInAIMaster'] = self.enable_error_monitoring_in_aimaster
        if self.error_monitoring_args is not None:
            result['ErrorMonitoringArgs'] = self.error_monitoring_args
        if self.priority is not None:
            result['Priority'] = self.priority
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('AIMasterType') is not None:
            self.aimaster_type = m.get('AIMasterType')
        if m.get('EnableErrorMonitoringInAIMaster') is not None:
            self.enable_error_monitoring_in_aimaster = m.get('EnableErrorMonitoringInAIMaster')
        if m.get('ErrorMonitoringArgs') is not None:
            self.error_monitoring_args = m.get('ErrorMonitoringArgs')
        if m.get('Priority') is not None:
            self.priority = m.get('Priority')
        return self


class GetTrainingJobResponseBodyStatusTransitions(TeaModel):
    """One entry in the job's status-transition history."""

    def __init__(
        self,
        end_time: str = None,
        reason_code: str = None,
        reason_message: str = None,
        start_time: str = None,
        status: str = None,
    ):
        # When the job left this status.
        self.end_time = end_time
        # Machine-readable reason code for the transition.
        self.reason_code = reason_code
        # Human-readable reason message for the transition.
        self.reason_message = reason_message
        # When the job entered this status.
        self.start_time = start_time
        # Status the job was in during [start_time, end_time].
        self.status = status

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.end_time is not None:
            result['EndTime'] = self.end_time
        if self.reason_code is not None:
            result['ReasonCode'] = self.reason_code
        if self.reason_message is not None:
            result['ReasonMessage'] = self.reason_message
        if self.start_time is not None:
            result['StartTime'] = self.start_time
        if self.status is not None:
            result['Status'] = self.status
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('EndTime') is not None:
            self.end_time = m.get('EndTime')
        if m.get('ReasonCode') is not None:
            self.reason_code = m.get('ReasonCode')
        if m.get('ReasonMessage') is not None:
            self.reason_message = m.get('ReasonMessage')
        if m.get('StartTime') is not None:
            self.start_time = m.get('StartTime')
        if m.get('Status') is not None:
            self.status = m.get('Status')
        return self


class GetTrainingJobResponseBodyUserVpc(TeaModel):
    """VPC configuration the training job runs inside."""

    def __init__(
        self,
        extended_cidrs: List[str] = None,
        security_group_id: str = None,
        switch_id: str = None,
        vpc_id: str = None,
    ):
        # Additional CIDR blocks routed into the job's network.
        self.extended_cidrs = extended_cidrs
        # Security group applied to job instances.
        self.security_group_id = security_group_id
        # vSwitch the instances attach to.
        self.switch_id = switch_id
        # VPC the job runs in.
        self.vpc_id = vpc_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map

        result = dict()
        if self.extended_cidrs is not None:
            result['ExtendedCIDRs'] = self.extended_cidrs
        if self.security_group_id is not None:
            result['SecurityGroupId'] = self.security_group_id
        if self.switch_id is not None:
            result['SwitchId'] = self.switch_id
        if self.vpc_id is not None:
            result['VpcId'] = self.vpc_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('ExtendedCIDRs') is not None:
            self.extended_cidrs = m.get('ExtendedCIDRs')
        if m.get('SecurityGroupId') is not None:
            self.security_group_id = m.get('SecurityGroupId')
        if m.get('SwitchId') is not None:
            self.switch_id = m.get('SwitchId')
        if m.get('VpcId') is not None:
            self.vpc_id = m.get('VpcId')
        return self
GetTrainingJobResponseBodySettings = None, + status: str = None, + status_transitions: List[GetTrainingJobResponseBodyStatusTransitions] = None, + training_job_description: str = None, + training_job_id: str = None, + training_job_name: str = None, + training_job_url: str = None, + user_id: str = None, + user_vpc: GetTrainingJobResponseBodyUserVpc = None, + workspace_id: str = None, ): - self.end_time = end_time - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.start_time = start_time + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.algorithm_spec = algorithm_spec + self.algorithm_version = algorithm_version + self.compute_resource = compute_resource + self.duration = duration + self.environments = environments + self.experiment_config = experiment_config + self.gmt_create_time = gmt_create_time + self.gmt_modified_time = gmt_modified_time + self.hyper_parameters = hyper_parameters + self.input_channels = input_channels + self.instances = instances + self.is_temp_algo = is_temp_algo + self.labels = labels + self.latest_metrics = latest_metrics + self.latest_progress = latest_progress + self.output_channels = output_channels + self.output_model = output_model + self.python_requirements = python_requirements + self.reason_code = reason_code + self.reason_message = reason_message + self.request_id = request_id + self.role_arn = role_arn + self.scheduler = scheduler + self.settings = settings + self.status = status + self.status_transitions = status_transitions + self.training_job_description = training_job_description + self.training_job_id = training_job_id + self.training_job_name = training_job_name + self.training_job_url = training_job_url + self.user_id = user_id + self.user_vpc = user_vpc + self.workspace_id = workspace_id def validate(self): - pass + if self.algorithm_spec: + self.algorithm_spec.validate() + if 
self.compute_resource: + self.compute_resource.validate() + if self.experiment_config: + self.experiment_config.validate() + if self.hyper_parameters: + for k in self.hyper_parameters: + if k: + k.validate() + if self.input_channels: + for k in self.input_channels: + if k: + k.validate() + if self.instances: + for k in self.instances: + if k: + k.validate() + if self.labels: + for k in self.labels: + if k: + k.validate() + if self.latest_metrics: + for k in self.latest_metrics: + if k: + k.validate() + if self.latest_progress: + self.latest_progress.validate() + if self.output_channels: + for k in self.output_channels: + if k: + k.validate() + if self.output_model: + self.output_model.validate() + if self.scheduler: + self.scheduler.validate() + if self.settings: + self.settings.validate() + if self.status_transitions: + for k in self.status_transitions: + if k: + k.validate() + if self.user_vpc: + self.user_vpc.validate() def to_map(self): _map = super().to_map() @@ -13198,1788 +13629,206 @@ def to_map(self): return _map result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.start_time is not None: - result['StartTime'] = self.start_time - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EndTime') is not None: - self.end_time = m.get('EndTime') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - return self - - -class 
GetSpotPriceHistoryResponseBody(TeaModel): - def __init__( - self, - request_id: str = None, - spot_price_history: List[SpotPriceItem] = None, - total_count: int = None, - ): - self.request_id = request_id - self.spot_price_history = spot_price_history - self.total_count = total_count - - def validate(self): - if self.spot_price_history: - for k in self.spot_price_history: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - result['SpotPriceHistory'] = [] - if self.spot_price_history is not None: - for k in self.spot_price_history: - result['SpotPriceHistory'].append(k.to_map() if k else None) - if self.total_count is not None: - result['TotalCount'] = self.total_count - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - self.spot_price_history = [] - if m.get('SpotPriceHistory') is not None: - for k in m.get('SpotPriceHistory'): - temp_model = SpotPriceItem() - self.spot_price_history.append(temp_model.from_map(k)) - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') - return self - - -class GetSpotPriceHistoryResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetSpotPriceHistoryResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if 
m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetSpotPriceHistoryResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetSpotStockPreviewResponseBody(TeaModel): - def __init__( - self, - instance_type: str = None, - request_id: str = None, - stock_status: str = None, - ): - self.instance_type = instance_type - self.request_id = request_id - self.stock_status = stock_status - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.instance_type is not None: - result['InstanceType'] = self.instance_type - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.stock_status is not None: - result['StockStatus'] = self.stock_status - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('InstanceType') is not None: - self.instance_type = m.get('InstanceType') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('StockStatus') is not None: - self.stock_status = m.get('StockStatus') - return self - - -class GetSpotStockPreviewResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetSpotStockPreviewResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is 
not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetSpotStockPreviewResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetTokenRequest(TeaModel): - def __init__( - self, - expire_time: int = None, - training_job_id: str = None, - ): - self.expire_time = expire_time - self.training_job_id = training_job_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.expire_time is not None: - result['ExpireTime'] = self.expire_time - if self.training_job_id is not None: - result['TrainingJobId'] = self.training_job_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ExpireTime') is not None: - self.expire_time = m.get('ExpireTime') - if m.get('TrainingJobId') is not None: - self.training_job_id = m.get('TrainingJobId') - return self - - -class GetTokenResponseBody(TeaModel): - def __init__( - self, - request_id: str = None, - token: str = None, - ): - self.request_id = request_id - self.token = token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.token is not None: - result['Token'] = self.token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('Token') is not None: - self.token = m.get('Token') - return self - - -class GetTokenResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetTokenResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - 
def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetTokenResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetTrainingJobRequest(TeaModel): - def __init__( - self, - token: str = None, - ): - self.token = token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.token is not None: - result['Token'] = self.token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Token') is not None: - self.token = m.get('Token') - return self - - -class GetTrainingJobResponseBodyComputeResourceInstanceSpec(TeaModel): - def __init__( - self, - cpu: str = None, - gpu: str = None, - gputype: str = None, - memory: str = None, - shared_memory: str = None, - ): - self.cpu = cpu - self.gpu = gpu - self.gputype = gputype - self.memory = memory - self.shared_memory = shared_memory - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.cpu is not None: - result['CPU'] = self.cpu - if self.gpu is not None: - result['GPU'] = self.gpu - if self.gputype is not None: - result['GPUType'] = self.gputype - if self.memory is not None: - result['Memory'] = self.memory - if self.shared_memory is not None: - result['SharedMemory'] = self.shared_memory - return result - - def from_map(self, m: dict = None): - m = m or dict() - if 
m.get('CPU') is not None: - self.cpu = m.get('CPU') - if m.get('GPU') is not None: - self.gpu = m.get('GPU') - if m.get('GPUType') is not None: - self.gputype = m.get('GPUType') - if m.get('Memory') is not None: - self.memory = m.get('Memory') - if m.get('SharedMemory') is not None: - self.shared_memory = m.get('SharedMemory') - return self - - -class GetTrainingJobResponseBodyComputeResource(TeaModel): - def __init__( - self, - ecs_count: int = None, - ecs_spec: str = None, - instance_count: int = None, - instance_spec: GetTrainingJobResponseBodyComputeResourceInstanceSpec = None, - resource_id: str = None, - ): - self.ecs_count = ecs_count - self.ecs_spec = ecs_spec - self.instance_count = instance_count - self.instance_spec = instance_spec - self.resource_id = resource_id - - def validate(self): - if self.instance_spec: - self.instance_spec.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.ecs_count is not None: - result['EcsCount'] = self.ecs_count - if self.ecs_spec is not None: - result['EcsSpec'] = self.ecs_spec - if self.instance_count is not None: - result['InstanceCount'] = self.instance_count - if self.instance_spec is not None: - result['InstanceSpec'] = self.instance_spec.to_map() - if self.resource_id is not None: - result['ResourceId'] = self.resource_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EcsCount') is not None: - self.ecs_count = m.get('EcsCount') - if m.get('EcsSpec') is not None: - self.ecs_spec = m.get('EcsSpec') - if m.get('InstanceCount') is not None: - self.instance_count = m.get('InstanceCount') - if m.get('InstanceSpec') is not None: - temp_model = GetTrainingJobResponseBodyComputeResourceInstanceSpec() - self.instance_spec = temp_model.from_map(m['InstanceSpec']) - if m.get('ResourceId') is not None: - self.resource_id = m.get('ResourceId') - return self - - -class 
GetTrainingJobResponseBodyExperimentConfig(TeaModel): - def __init__( - self, - experiment_id: str = None, - experiment_name: str = None, - ): - self.experiment_id = experiment_id - self.experiment_name = experiment_name - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.experiment_id is not None: - result['ExperimentId'] = self.experiment_id - if self.experiment_name is not None: - result['ExperimentName'] = self.experiment_name - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ExperimentId') is not None: - self.experiment_id = m.get('ExperimentId') - if m.get('ExperimentName') is not None: - self.experiment_name = m.get('ExperimentName') - return self - - -class GetTrainingJobResponseBodyHyperParameters(TeaModel): - def __init__( - self, - name: str = None, - value: str = None, - ): - self.name = name - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.name is not None: - result['Name'] = self.name - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetTrainingJobResponseBodyInputChannels(TeaModel): - def __init__( - self, - dataset_id: str = None, - input_uri: str = None, - name: str = None, - ): - self.dataset_id = dataset_id - self.input_uri = input_uri - self.name = name - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.dataset_id is not None: - result['DatasetId'] = self.dataset_id - if self.input_uri is not None: - result['InputUri'] = self.input_uri - if self.name is not None: - result['Name'] = 
self.name - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('DatasetId') is not None: - self.dataset_id = m.get('DatasetId') - if m.get('InputUri') is not None: - self.input_uri = m.get('InputUri') - if m.get('Name') is not None: - self.name = m.get('Name') - return self - - -class GetTrainingJobResponseBodyInstances(TeaModel): - def __init__( - self, - name: str = None, - role: str = None, - status: str = None, - ): - self.name = name - self.role = role - self.status = status - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.name is not None: - result['Name'] = self.name - if self.role is not None: - result['Role'] = self.role - if self.status is not None: - result['Status'] = self.status - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Role') is not None: - self.role = m.get('Role') - if m.get('Status') is not None: - self.status = m.get('Status') - return self - - -class GetTrainingJobResponseBodyLabels(TeaModel): - def __init__( - self, - key: str = None, - value: str = None, - ): - self.key = key - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetTrainingJobResponseBodyLatestMetrics(TeaModel): - def __init__( - self, - name: str = None, - timestamp: str = None, - value: float = None, - ): - self.name = name - self.timestamp = timestamp - self.value = value - - def validate(self): - pass - - def 
to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.name is not None: - result['Name'] = self.name - if self.timestamp is not None: - result['Timestamp'] = self.timestamp - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Timestamp') is not None: - self.timestamp = m.get('Timestamp') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetTrainingJobResponseBodyLatestProgressOverallProgress(TeaModel): - def __init__( - self, - timestamp: str = None, - value: float = None, - ): - self.timestamp = timestamp - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.timestamp is not None: - result['Timestamp'] = self.timestamp - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Timestamp') is not None: - self.timestamp = m.get('Timestamp') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetTrainingJobResponseBodyLatestProgressRemainingTime(TeaModel): - def __init__( - self, - timestamp: str = None, - value: int = None, - ): - self.timestamp = timestamp - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.timestamp is not None: - result['Timestamp'] = self.timestamp - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Timestamp') is not None: - self.timestamp = m.get('Timestamp') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class 
GetTrainingJobResponseBodyLatestProgress(TeaModel): - def __init__( - self, - overall_progress: GetTrainingJobResponseBodyLatestProgressOverallProgress = None, - remaining_time: GetTrainingJobResponseBodyLatestProgressRemainingTime = None, - ): - self.overall_progress = overall_progress - self.remaining_time = remaining_time - - def validate(self): - if self.overall_progress: - self.overall_progress.validate() - if self.remaining_time: - self.remaining_time.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.overall_progress is not None: - result['OverallProgress'] = self.overall_progress.to_map() - if self.remaining_time is not None: - result['RemainingTime'] = self.remaining_time.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OverallProgress') is not None: - temp_model = GetTrainingJobResponseBodyLatestProgressOverallProgress() - self.overall_progress = temp_model.from_map(m['OverallProgress']) - if m.get('RemainingTime') is not None: - temp_model = GetTrainingJobResponseBodyLatestProgressRemainingTime() - self.remaining_time = temp_model.from_map(m['RemainingTime']) - return self - - -class GetTrainingJobResponseBodyOutputChannels(TeaModel): - def __init__( - self, - dataset_id: str = None, - name: str = None, - output_uri: str = None, - ): - self.dataset_id = dataset_id - self.name = name - self.output_uri = output_uri - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.dataset_id is not None: - result['DatasetId'] = self.dataset_id - if self.name is not None: - result['Name'] = self.name - if self.output_uri is not None: - result['OutputUri'] = self.output_uri - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('DatasetId') is not None: - self.dataset_id = m.get('DatasetId') - if m.get('Name') is not None: - 
self.name = m.get('Name') - if m.get('OutputUri') is not None: - self.output_uri = m.get('OutputUri') - return self - - -class GetTrainingJobResponseBodyOutputModel(TeaModel): - def __init__( - self, - output_channel_name: str = None, - uri: str = None, - ): - self.output_channel_name = output_channel_name - self.uri = uri - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.output_channel_name is not None: - result['OutputChannelName'] = self.output_channel_name - if self.uri is not None: - result['Uri'] = self.uri - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('OutputChannelName') is not None: - self.output_channel_name = m.get('OutputChannelName') - if m.get('Uri') is not None: - self.uri = m.get('Uri') - return self - - -class GetTrainingJobResponseBodyScheduler(TeaModel): - def __init__( - self, - max_running_time_in_seconds: int = None, - ): - self.max_running_time_in_seconds = max_running_time_in_seconds - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.max_running_time_in_seconds is not None: - result['MaxRunningTimeInSeconds'] = self.max_running_time_in_seconds - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('MaxRunningTimeInSeconds') is not None: - self.max_running_time_in_seconds = m.get('MaxRunningTimeInSeconds') - return self - - -class GetTrainingJobResponseBodySettings(TeaModel): - def __init__( - self, - aimaster_type: str = None, - enable_error_monitoring_in_aimaster: bool = None, - error_monitoring_args: str = None, - priority: int = None, - ): - self.aimaster_type = aimaster_type - self.enable_error_monitoring_in_aimaster = enable_error_monitoring_in_aimaster - self.error_monitoring_args = error_monitoring_args - self.priority = priority - - def validate(self): - pass - - def 
to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.aimaster_type is not None: - result['AIMasterType'] = self.aimaster_type - if self.enable_error_monitoring_in_aimaster is not None: - result['EnableErrorMonitoringInAIMaster'] = self.enable_error_monitoring_in_aimaster - if self.error_monitoring_args is not None: - result['ErrorMonitoringArgs'] = self.error_monitoring_args - if self.priority is not None: - result['Priority'] = self.priority - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AIMasterType') is not None: - self.aimaster_type = m.get('AIMasterType') - if m.get('EnableErrorMonitoringInAIMaster') is not None: - self.enable_error_monitoring_in_aimaster = m.get('EnableErrorMonitoringInAIMaster') - if m.get('ErrorMonitoringArgs') is not None: - self.error_monitoring_args = m.get('ErrorMonitoringArgs') - if m.get('Priority') is not None: - self.priority = m.get('Priority') - return self - - -class GetTrainingJobResponseBodyStatusTransitions(TeaModel): - def __init__( - self, - end_time: str = None, - reason_code: str = None, - reason_message: str = None, - start_time: str = None, - status: str = None, - ): - self.end_time = end_time - self.reason_code = reason_code - self.reason_message = reason_message - self.start_time = start_time - self.status = status - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.end_time is not None: - result['EndTime'] = self.end_time - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message - if self.start_time is not None: - result['StartTime'] = self.start_time - if self.status is not None: - result['Status'] = self.status - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('EndTime') is not None: - 
self.end_time = m.get('EndTime') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') - if m.get('StartTime') is not None: - self.start_time = m.get('StartTime') - if m.get('Status') is not None: - self.status = m.get('Status') - return self - - -class GetTrainingJobResponseBodyUserVpc(TeaModel): - def __init__( - self, - extended_cidrs: List[str] = None, - security_group_id: str = None, - switch_id: str = None, - vpc_id: str = None, - ): - self.extended_cidrs = extended_cidrs - self.security_group_id = security_group_id - self.switch_id = switch_id - self.vpc_id = vpc_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.extended_cidrs is not None: - result['ExtendedCIDRs'] = self.extended_cidrs - if self.security_group_id is not None: - result['SecurityGroupId'] = self.security_group_id - if self.switch_id is not None: - result['SwitchId'] = self.switch_id - if self.vpc_id is not None: - result['VpcId'] = self.vpc_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ExtendedCIDRs') is not None: - self.extended_cidrs = m.get('ExtendedCIDRs') - if m.get('SecurityGroupId') is not None: - self.security_group_id = m.get('SecurityGroupId') - if m.get('SwitchId') is not None: - self.switch_id = m.get('SwitchId') - if m.get('VpcId') is not None: - self.vpc_id = m.get('VpcId') - return self - - -class GetTrainingJobResponseBody(TeaModel): - def __init__( - self, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - algorithm_spec: AlgorithmSpec = None, - algorithm_version: str = None, - compute_resource: GetTrainingJobResponseBodyComputeResource = None, - duration: int = None, - environments: Dict[str, str] = None, - experiment_config: GetTrainingJobResponseBodyExperimentConfig = None, - 
gmt_create_time: str = None, - gmt_modified_time: str = None, - hyper_parameters: List[GetTrainingJobResponseBodyHyperParameters] = None, - input_channels: List[GetTrainingJobResponseBodyInputChannels] = None, - instances: List[GetTrainingJobResponseBodyInstances] = None, - is_temp_algo: bool = None, - labels: List[GetTrainingJobResponseBodyLabels] = None, - latest_metrics: List[GetTrainingJobResponseBodyLatestMetrics] = None, - latest_progress: GetTrainingJobResponseBodyLatestProgress = None, - output_channels: List[GetTrainingJobResponseBodyOutputChannels] = None, - output_model: GetTrainingJobResponseBodyOutputModel = None, - python_requirements: List[str] = None, - reason_code: str = None, - reason_message: str = None, - request_id: str = None, - role_arn: str = None, - scheduler: GetTrainingJobResponseBodyScheduler = None, - settings: GetTrainingJobResponseBodySettings = None, - status: str = None, - status_transitions: List[GetTrainingJobResponseBodyStatusTransitions] = None, - training_job_description: str = None, - training_job_id: str = None, - training_job_name: str = None, - training_job_url: str = None, - user_id: str = None, - user_vpc: GetTrainingJobResponseBodyUserVpc = None, - workspace_id: str = None, - ): - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.algorithm_spec = algorithm_spec - self.algorithm_version = algorithm_version - self.compute_resource = compute_resource - self.duration = duration - self.environments = environments - self.experiment_config = experiment_config - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.hyper_parameters = hyper_parameters - self.input_channels = input_channels - self.instances = instances - self.is_temp_algo = is_temp_algo - self.labels = labels - self.latest_metrics = latest_metrics - self.latest_progress = latest_progress - self.output_channels = output_channels - self.output_model = 
output_model - self.python_requirements = python_requirements - self.reason_code = reason_code - self.reason_message = reason_message - self.request_id = request_id - self.role_arn = role_arn - self.scheduler = scheduler - self.settings = settings - self.status = status - self.status_transitions = status_transitions - self.training_job_description = training_job_description - self.training_job_id = training_job_id - self.training_job_name = training_job_name - self.training_job_url = training_job_url - self.user_id = user_id - self.user_vpc = user_vpc - self.workspace_id = workspace_id - - def validate(self): - if self.algorithm_spec: - self.algorithm_spec.validate() - if self.compute_resource: - self.compute_resource.validate() - if self.experiment_config: - self.experiment_config.validate() - if self.hyper_parameters: - for k in self.hyper_parameters: - if k: - k.validate() - if self.input_channels: - for k in self.input_channels: - if k: - k.validate() - if self.instances: - for k in self.instances: - if k: - k.validate() - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.latest_metrics: - for k in self.latest_metrics: - if k: - k.validate() - if self.latest_progress: - self.latest_progress.validate() - if self.output_channels: - for k in self.output_channels: - if k: - k.validate() - if self.output_model: - self.output_model.validate() - if self.scheduler: - self.scheduler.validate() - if self.settings: - self.settings.validate() - if self.status_transitions: - for k in self.status_transitions: - if k: - k.validate() - if self.user_vpc: - self.user_vpc.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if 
self.algorithm_spec is not None: - result['AlgorithmSpec'] = self.algorithm_spec.to_map() - if self.algorithm_version is not None: - result['AlgorithmVersion'] = self.algorithm_version - if self.compute_resource is not None: - result['ComputeResource'] = self.compute_resource.to_map() - if self.duration is not None: - result['Duration'] = self.duration - if self.environments is not None: - result['Environments'] = self.environments - if self.experiment_config is not None: - result['ExperimentConfig'] = self.experiment_config.to_map() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - result['HyperParameters'] = [] - if self.hyper_parameters is not None: - for k in self.hyper_parameters: - result['HyperParameters'].append(k.to_map() if k else None) - result['InputChannels'] = [] - if self.input_channels is not None: - for k in self.input_channels: - result['InputChannels'].append(k.to_map() if k else None) - result['Instances'] = [] - if self.instances is not None: - for k in self.instances: - result['Instances'].append(k.to_map() if k else None) - if self.is_temp_algo is not None: - result['IsTempAlgo'] = self.is_temp_algo - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - result['LatestMetrics'] = [] - if self.latest_metrics is not None: - for k in self.latest_metrics: - result['LatestMetrics'].append(k.to_map() if k else None) - if self.latest_progress is not None: - result['LatestProgress'] = self.latest_progress.to_map() - result['OutputChannels'] = [] - if self.output_channels is not None: - for k in self.output_channels: - result['OutputChannels'].append(k.to_map() if k else None) - if self.output_model is not None: - result['OutputModel'] = self.output_model.to_map() - if self.python_requirements is not None: - result['PythonRequirements'] = 
self.python_requirements - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message - if self.request_id is not None: - result['RequestId'] = self.request_id - if self.role_arn is not None: - result['RoleArn'] = self.role_arn - if self.scheduler is not None: - result['Scheduler'] = self.scheduler.to_map() - if self.settings is not None: - result['Settings'] = self.settings.to_map() - if self.status is not None: - result['Status'] = self.status - result['StatusTransitions'] = [] - if self.status_transitions is not None: - for k in self.status_transitions: - result['StatusTransitions'].append(k.to_map() if k else None) - if self.training_job_description is not None: - result['TrainingJobDescription'] = self.training_job_description - if self.training_job_id is not None: - result['TrainingJobId'] = self.training_job_id - if self.training_job_name is not None: - result['TrainingJobName'] = self.training_job_name - if self.training_job_url is not None: - result['TrainingJobUrl'] = self.training_job_url - if self.user_id is not None: - result['UserId'] = self.user_id - if self.user_vpc is not None: - result['UserVpc'] = self.user_vpc.to_map() - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('AlgorithmSpec') is not None: - temp_model = AlgorithmSpec() - self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) - if m.get('AlgorithmVersion') is not None: - self.algorithm_version = m.get('AlgorithmVersion') - if m.get('ComputeResource') is not None: - temp_model = 
GetTrainingJobResponseBodyComputeResource() - self.compute_resource = temp_model.from_map(m['ComputeResource']) - if m.get('Duration') is not None: - self.duration = m.get('Duration') - if m.get('Environments') is not None: - self.environments = m.get('Environments') - if m.get('ExperimentConfig') is not None: - temp_model = GetTrainingJobResponseBodyExperimentConfig() - self.experiment_config = temp_model.from_map(m['ExperimentConfig']) - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - self.hyper_parameters = [] - if m.get('HyperParameters') is not None: - for k in m.get('HyperParameters'): - temp_model = GetTrainingJobResponseBodyHyperParameters() - self.hyper_parameters.append(temp_model.from_map(k)) - self.input_channels = [] - if m.get('InputChannels') is not None: - for k in m.get('InputChannels'): - temp_model = GetTrainingJobResponseBodyInputChannels() - self.input_channels.append(temp_model.from_map(k)) - self.instances = [] - if m.get('Instances') is not None: - for k in m.get('Instances'): - temp_model = GetTrainingJobResponseBodyInstances() - self.instances.append(temp_model.from_map(k)) - if m.get('IsTempAlgo') is not None: - self.is_temp_algo = m.get('IsTempAlgo') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = GetTrainingJobResponseBodyLabels() - self.labels.append(temp_model.from_map(k)) - self.latest_metrics = [] - if m.get('LatestMetrics') is not None: - for k in m.get('LatestMetrics'): - temp_model = GetTrainingJobResponseBodyLatestMetrics() - self.latest_metrics.append(temp_model.from_map(k)) - if m.get('LatestProgress') is not None: - temp_model = GetTrainingJobResponseBodyLatestProgress() - self.latest_progress = temp_model.from_map(m['LatestProgress']) - self.output_channels = [] - if m.get('OutputChannels') is not None: - for k in m.get('OutputChannels'): - 
temp_model = GetTrainingJobResponseBodyOutputChannels() - self.output_channels.append(temp_model.from_map(k)) - if m.get('OutputModel') is not None: - temp_model = GetTrainingJobResponseBodyOutputModel() - self.output_model = temp_model.from_map(m['OutputModel']) - if m.get('PythonRequirements') is not None: - self.python_requirements = m.get('PythonRequirements') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('RoleArn') is not None: - self.role_arn = m.get('RoleArn') - if m.get('Scheduler') is not None: - temp_model = GetTrainingJobResponseBodyScheduler() - self.scheduler = temp_model.from_map(m['Scheduler']) - if m.get('Settings') is not None: - temp_model = GetTrainingJobResponseBodySettings() - self.settings = temp_model.from_map(m['Settings']) - if m.get('Status') is not None: - self.status = m.get('Status') - self.status_transitions = [] - if m.get('StatusTransitions') is not None: - for k in m.get('StatusTransitions'): - temp_model = GetTrainingJobResponseBodyStatusTransitions() - self.status_transitions.append(temp_model.from_map(k)) - if m.get('TrainingJobDescription') is not None: - self.training_job_description = m.get('TrainingJobDescription') - if m.get('TrainingJobId') is not None: - self.training_job_id = m.get('TrainingJobId') - if m.get('TrainingJobName') is not None: - self.training_job_name = m.get('TrainingJobName') - if m.get('TrainingJobUrl') is not None: - self.training_job_url = m.get('TrainingJobUrl') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('UserVpc') is not None: - temp_model = GetTrainingJobResponseBodyUserVpc() - self.user_vpc = temp_model.from_map(m['UserVpc']) - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class 
GetTrainingJobResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetTrainingJobResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetTrainingJobResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetTrainingJobErrorInfoRequest(TeaModel): - def __init__( - self, - token: str = None, - ): - self.token = token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.token is not None: - result['Token'] = self.token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Token') is not None: - self.token = m.get('Token') - return self - - -class GetTrainingJobErrorInfoResponseBodyErrorInfo(TeaModel): - def __init__( - self, - additional_info: str = None, - code: str = None, - message: str = None, - ): - self.additional_info = additional_info - self.code = code - self.message = message - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.additional_info is not None: - result['AdditionalInfo'] = self.additional_info - if self.code is not None: - result['Code'] = self.code - if 
self.message is not None: - result['Message'] = self.message - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('AdditionalInfo') is not None: - self.additional_info = m.get('AdditionalInfo') - if m.get('Code') is not None: - self.code = m.get('Code') - if m.get('Message') is not None: - self.message = m.get('Message') - return self - - -class GetTrainingJobErrorInfoResponseBody(TeaModel): - def __init__( - self, - error_info: GetTrainingJobErrorInfoResponseBodyErrorInfo = None, - request_id: str = None, - ): - self.error_info = error_info - self.request_id = request_id - - def validate(self): - if self.error_info: - self.error_info.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.error_info is not None: - result['ErrorInfo'] = self.error_info.to_map() - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ErrorInfo') is not None: - temp_model = GetTrainingJobErrorInfoResponseBodyErrorInfo() - self.error_info = temp_model.from_map(m['ErrorInfo']) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetTrainingJobErrorInfoResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetTrainingJobErrorInfoResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if 
m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetTrainingJobErrorInfoResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetTrainingJobLatestMetricsRequest(TeaModel): - def __init__( - self, - names: str = None, - token: str = None, - ): - self.names = names - self.token = token - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.names is not None: - result['Names'] = self.names - if self.token is not None: - result['Token'] = self.token - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Names') is not None: - self.names = m.get('Names') - if m.get('Token') is not None: - self.token = m.get('Token') - return self - - -class GetTrainingJobLatestMetricsResponseBodyMetrics(TeaModel): - def __init__( - self, - name: str = None, - timestamp: str = None, - value: float = None, - ): - self.name = name - self.timestamp = timestamp - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.name is not None: - result['Name'] = self.name - if self.timestamp is not None: - result['Timestamp'] = self.timestamp - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Timestamp') is not None: - self.timestamp = m.get('Timestamp') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class GetTrainingJobLatestMetricsResponseBody(TeaModel): - def __init__( - self, - metrics: List[GetTrainingJobLatestMetricsResponseBodyMetrics] = None, - request_id: str = None, - ): - self.metrics = metrics - 
self.request_id = request_id - - def validate(self): - if self.metrics: - for k in self.metrics: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - result['Metrics'] = [] - if self.metrics is not None: - for k in self.metrics: - result['Metrics'].append(k.to_map() if k else None) - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - self.metrics = [] - if m.get('Metrics') is not None: - for k in m.get('Metrics'): - temp_model = GetTrainingJobLatestMetricsResponseBodyMetrics() - self.metrics.append(temp_model.from_map(k)) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class GetTrainingJobLatestMetricsResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: GetTrainingJobLatestMetricsResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = GetTrainingJobLatestMetricsResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - -class GetUserViewMetricsRequest(TeaModel): - def __init__( - self, - order: str = None, - page_number: str = None, - page_size: str = None, - sort_by: str = None, - 
time_step: str = None, - user_id: str = None, - workspace_id: str = None, - ): - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.time_step = time_step - self.user_id = user_id - self.workspace_id = workspace_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.time_step is not None: - result['TimeStep'] = self.time_step + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.algorithm_spec is not None: + result['AlgorithmSpec'] = self.algorithm_spec.to_map() + if self.algorithm_version is not None: + result['AlgorithmVersion'] = self.algorithm_version + if self.compute_resource is not None: + result['ComputeResource'] = self.compute_resource.to_map() + if self.duration is not None: + result['Duration'] = self.duration + if self.environments is not None: + result['Environments'] = self.environments + if self.experiment_config is not None: + result['ExperimentConfig'] = self.experiment_config.to_map() + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + result['HyperParameters'] = [] + if self.hyper_parameters is not None: + for k in self.hyper_parameters: + result['HyperParameters'].append(k.to_map() if k else None) + result['InputChannels'] = [] + if self.input_channels is not None: + for k in 
self.input_channels: + result['InputChannels'].append(k.to_map() if k else None) + result['Instances'] = [] + if self.instances is not None: + for k in self.instances: + result['Instances'].append(k.to_map() if k else None) + if self.is_temp_algo is not None: + result['IsTempAlgo'] = self.is_temp_algo + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + result['LatestMetrics'] = [] + if self.latest_metrics is not None: + for k in self.latest_metrics: + result['LatestMetrics'].append(k.to_map() if k else None) + if self.latest_progress is not None: + result['LatestProgress'] = self.latest_progress.to_map() + result['OutputChannels'] = [] + if self.output_channels is not None: + for k in self.output_channels: + result['OutputChannels'].append(k.to_map() if k else None) + if self.output_model is not None: + result['OutputModel'] = self.output_model.to_map() + if self.python_requirements is not None: + result['PythonRequirements'] = self.python_requirements + if self.reason_code is not None: + result['ReasonCode'] = self.reason_code + if self.reason_message is not None: + result['ReasonMessage'] = self.reason_message + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.role_arn is not None: + result['RoleArn'] = self.role_arn + if self.scheduler is not None: + result['Scheduler'] = self.scheduler.to_map() + if self.settings is not None: + result['Settings'] = self.settings.to_map() + if self.status is not None: + result['Status'] = self.status + result['StatusTransitions'] = [] + if self.status_transitions is not None: + for k in self.status_transitions: + result['StatusTransitions'].append(k.to_map() if k else None) + if self.training_job_description is not None: + result['TrainingJobDescription'] = self.training_job_description + if self.training_job_id is not None: + result['TrainingJobId'] = self.training_job_id + if self.training_job_name is not None: + 
result['TrainingJobName'] = self.training_job_name + if self.training_job_url is not None: + result['TrainingJobUrl'] = self.training_job_url if self.user_id is not None: result['UserId'] = self.user_id + if self.user_vpc is not None: + result['UserVpc'] = self.user_vpc.to_map() if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('TimeStep') is not None: - self.time_step = m.get('TimeStep') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class GetUserViewMetricsResponseBody(TeaModel): - def __init__( - self, - resource_group_id: str = None, - summary: UserViewMetric = None, - total: int = None, - user_metrics: List[UserViewMetric] = None, - ): - self.resource_group_id = resource_group_id - self.summary = summary - self.total = total - self.user_metrics = user_metrics - - def validate(self): - if self.summary: - self.summary.validate() - if self.user_metrics: - for k in self.user_metrics: - if k: - k.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.resource_group_id is not None: - result['ResourceGroupId'] = self.resource_group_id - if self.summary is not None: - result['Summary'] = self.summary.to_map() - if self.total is not None: - result['Total'] = self.total - result['UserMetrics'] = [] - if self.user_metrics is not None: - for k in self.user_metrics: - result['UserMetrics'].append(k.to_map() if k else None) - return result - - def from_map(self, m: dict = None): - m = m or dict() - if 
m.get('ResourceGroupId') is not None: - self.resource_group_id = m.get('ResourceGroupId') - if m.get('Summary') is not None: - temp_model = UserViewMetric() - self.summary = temp_model.from_map(m['Summary']) - if m.get('Total') is not None: - self.total = m.get('Total') - self.user_metrics = [] - if m.get('UserMetrics') is not None: - for k in m.get('UserMetrics'): - temp_model = UserViewMetric() - self.user_metrics.append(temp_model.from_map(k)) + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('AlgorithmSpec') is not None: + temp_model = AlgorithmSpec() + self.algorithm_spec = temp_model.from_map(m['AlgorithmSpec']) + if m.get('AlgorithmVersion') is not None: + self.algorithm_version = m.get('AlgorithmVersion') + if m.get('ComputeResource') is not None: + temp_model = GetTrainingJobResponseBodyComputeResource() + self.compute_resource = temp_model.from_map(m['ComputeResource']) + if m.get('Duration') is not None: + self.duration = m.get('Duration') + if m.get('Environments') is not None: + self.environments = m.get('Environments') + if m.get('ExperimentConfig') is not None: + temp_model = GetTrainingJobResponseBodyExperimentConfig() + self.experiment_config = temp_model.from_map(m['ExperimentConfig']) + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + self.hyper_parameters = [] + if m.get('HyperParameters') is not None: + for k in m.get('HyperParameters'): + temp_model = GetTrainingJobResponseBodyHyperParameters() + self.hyper_parameters.append(temp_model.from_map(k)) + self.input_channels = [] + if m.get('InputChannels') is not None: + for k in m.get('InputChannels'): + temp_model = 
GetTrainingJobResponseBodyInputChannels() + self.input_channels.append(temp_model.from_map(k)) + self.instances = [] + if m.get('Instances') is not None: + for k in m.get('Instances'): + temp_model = GetTrainingJobResponseBodyInstances() + self.instances.append(temp_model.from_map(k)) + if m.get('IsTempAlgo') is not None: + self.is_temp_algo = m.get('IsTempAlgo') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = GetTrainingJobResponseBodyLabels() + self.labels.append(temp_model.from_map(k)) + self.latest_metrics = [] + if m.get('LatestMetrics') is not None: + for k in m.get('LatestMetrics'): + temp_model = GetTrainingJobResponseBodyLatestMetrics() + self.latest_metrics.append(temp_model.from_map(k)) + if m.get('LatestProgress') is not None: + temp_model = GetTrainingJobResponseBodyLatestProgress() + self.latest_progress = temp_model.from_map(m['LatestProgress']) + self.output_channels = [] + if m.get('OutputChannels') is not None: + for k in m.get('OutputChannels'): + temp_model = GetTrainingJobResponseBodyOutputChannels() + self.output_channels.append(temp_model.from_map(k)) + if m.get('OutputModel') is not None: + temp_model = GetTrainingJobResponseBodyOutputModel() + self.output_model = temp_model.from_map(m['OutputModel']) + if m.get('PythonRequirements') is not None: + self.python_requirements = m.get('PythonRequirements') + if m.get('ReasonCode') is not None: + self.reason_code = m.get('ReasonCode') + if m.get('ReasonMessage') is not None: + self.reason_message = m.get('ReasonMessage') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('RoleArn') is not None: + self.role_arn = m.get('RoleArn') + if m.get('Scheduler') is not None: + temp_model = GetTrainingJobResponseBodyScheduler() + self.scheduler = temp_model.from_map(m['Scheduler']) + if m.get('Settings') is not None: + temp_model = GetTrainingJobResponseBodySettings() + self.settings = temp_model.from_map(m['Settings']) + if 
m.get('Status') is not None: + self.status = m.get('Status') + self.status_transitions = [] + if m.get('StatusTransitions') is not None: + for k in m.get('StatusTransitions'): + temp_model = GetTrainingJobResponseBodyStatusTransitions() + self.status_transitions.append(temp_model.from_map(k)) + if m.get('TrainingJobDescription') is not None: + self.training_job_description = m.get('TrainingJobDescription') + if m.get('TrainingJobId') is not None: + self.training_job_id = m.get('TrainingJobId') + if m.get('TrainingJobName') is not None: + self.training_job_name = m.get('TrainingJobName') + if m.get('TrainingJobUrl') is not None: + self.training_job_url = m.get('TrainingJobUrl') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('UserVpc') is not None: + temp_model = GetTrainingJobResponseBodyUserVpc() + self.user_vpc = temp_model.from_map(m['UserVpc']) + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class GetUserViewMetricsResponse(TeaModel): +class GetTrainingJobResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: GetUserViewMetricsResponseBody = None, + body: GetTrainingJobResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -15010,19 +13859,17 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = GetUserViewMetricsResponseBody() + temp_model = GetTrainingJobResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListAI4DSerivcesRequest(TeaModel): +class GetTrainingJobErrorInfoRequest(TeaModel): def __init__( self, - service_type: str = None, - workspace_id: str = None, + token: str = None, ): - self.service_type = service_type - self.workspace_id = workspace_id + self.token = token def validate(self): pass @@ -15033,29 +13880,27 @@ def to_map(self): return _map result = dict() - if 
self.service_type is not None: - result['ServiceType'] = self.service_type - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.token is not None: + result['Token'] = self.token return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ServiceType') is not None: - self.service_type = m.get('ServiceType') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Token') is not None: + self.token = m.get('Token') return self -class ListAI4DSerivcesResponseBodyServices(TeaModel): +class GetTrainingJobErrorInfoResponseBodyErrorInfo(TeaModel): def __init__( self, - service_name: str = None, - service_type: str = None, + additional_info: str = None, + code: str = None, + message: str = None, ): - self.service_name = service_name - self.service_type = service_type + self.additional_info = additional_info + self.code = code + self.message = message def validate(self): pass @@ -15066,35 +13911,37 @@ def to_map(self): return _map result = dict() - if self.service_name is not None: - result['ServiceName'] = self.service_name - if self.service_type is not None: - result['ServiceType'] = self.service_type + if self.additional_info is not None: + result['AdditionalInfo'] = self.additional_info + if self.code is not None: + result['Code'] = self.code + if self.message is not None: + result['Message'] = self.message return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ServiceName') is not None: - self.service_name = m.get('ServiceName') - if m.get('ServiceType') is not None: - self.service_type = m.get('ServiceType') + if m.get('AdditionalInfo') is not None: + self.additional_info = m.get('AdditionalInfo') + if m.get('Code') is not None: + self.code = m.get('Code') + if m.get('Message') is not None: + self.message = m.get('Message') return self -class ListAI4DSerivcesResponseBody(TeaModel): +class GetTrainingJobErrorInfoResponseBody(TeaModel): def __init__( self, + 
error_info: GetTrainingJobErrorInfoResponseBodyErrorInfo = None, request_id: str = None, - services: List[ListAI4DSerivcesResponseBodyServices] = None, ): + self.error_info = error_info self.request_id = request_id - self.services = services def validate(self): - if self.services: - for k in self.services: - if k: - k.validate() + if self.error_info: + self.error_info.validate() def to_map(self): _map = super().to_map() @@ -15102,32 +13949,28 @@ def to_map(self): return _map result = dict() + if self.error_info is not None: + result['ErrorInfo'] = self.error_info.to_map() if self.request_id is not None: result['RequestId'] = self.request_id - result['Services'] = [] - if self.services is not None: - for k in self.services: - result['Services'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() + if m.get('ErrorInfo') is not None: + temp_model = GetTrainingJobErrorInfoResponseBodyErrorInfo() + self.error_info = temp_model.from_map(m['ErrorInfo']) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - self.services = [] - if m.get('Services') is not None: - for k in m.get('Services'): - temp_model = ListAI4DSerivcesResponseBodyServices() - self.services.append(temp_model.from_map(k)) return self -class ListAI4DSerivcesResponse(TeaModel): +class GetTrainingJobErrorInfoResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListAI4DSerivcesResponseBody = None, + body: GetTrainingJobErrorInfoResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -15158,19 +14001,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListAI4DSerivcesResponseBody() + temp_model = GetTrainingJobErrorInfoResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListAI4DServiceTemplatesRequest(TeaModel): +class 
GetTrainingJobLatestMetricsRequest(TeaModel): def __init__( self, - service_type: str = None, - workspace_id: str = None, + names: str = None, + token: str = None, ): - self.service_type = service_type - self.workspace_id = workspace_id + self.names = names + self.token = token def validate(self): pass @@ -15181,28 +14024,31 @@ def to_map(self): return _map result = dict() - if self.service_type is not None: - result['ServiceType'] = self.service_type - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.names is not None: + result['Names'] = self.names + if self.token is not None: + result['Token'] = self.token return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ServiceType') is not None: - self.service_type = m.get('ServiceType') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Names') is not None: + self.names = m.get('Names') + if m.get('Token') is not None: + self.token = m.get('Token') return self -class ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels(TeaModel): +class GetTrainingJobLatestMetricsResponseBodyMetrics(TeaModel): def __init__( self, - key: str = None, - value: str = None, + name: str = None, + timestamp: str = None, + value: float = None, ): - self.key = key + self.name = name + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.timestamp = timestamp self.value = value def validate(self): @@ -15214,43 +14060,130 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key + if self.name is not None: + result['Name'] = self.name + if self.timestamp is not None: + result['Timestamp'] = self.timestamp if self.value is not None: result['Value'] = self.value return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Timestamp') is not None: + self.timestamp = 
m.get('Timestamp') if m.get('Value') is not None: self.value = m.get('Value') return self -class ListAI4DServiceTemplatesResponseBodyServiceTemplates(TeaModel): +class GetTrainingJobLatestMetricsResponseBody(TeaModel): def __init__( self, - inference_spec: Dict[str, Any] = None, - labels: List[ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels] = None, - service_template_description: str = None, - service_template_doc: str = None, - service_template_id: str = None, - service_template_name: str = None, + metrics: List[GetTrainingJobLatestMetricsResponseBodyMetrics] = None, + request_id: str = None, + ): + self.metrics = metrics + self.request_id = request_id + + def validate(self): + if self.metrics: + for k in self.metrics: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + result['Metrics'] = [] + if self.metrics is not None: + for k in self.metrics: + result['Metrics'].append(k.to_map() if k else None) + if self.request_id is not None: + result['RequestId'] = self.request_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + self.metrics = [] + if m.get('Metrics') is not None: + for k in m.get('Metrics'): + temp_model = GetTrainingJobLatestMetricsResponseBodyMetrics() + self.metrics.append(temp_model.from_map(k)) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + return self + + +class GetTrainingJobLatestMetricsResponse(TeaModel): + def __init__( + self, + headers: Dict[str, str] = None, + status_code: int = None, + body: GetTrainingJobLatestMetricsResponseBody = None, + ): + self.headers = headers + self.status_code = status_code + self.body = body + + def validate(self): + if self.body: + self.body.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + 
result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = GetTrainingJobLatestMetricsResponseBody() + self.body = temp_model.from_map(m['body']) + return self + + +class GetUserViewMetricsRequest(TeaModel): + def __init__( + self, + order: str = None, + page_number: str = None, + page_size: str = None, + sort_by: str = None, + time_step: str = None, + user_id: str = None, + workspace_id: str = None, ): - self.inference_spec = inference_spec - self.labels = labels - self.service_template_description = service_template_description - self.service_template_doc = service_template_doc - self.service_template_id = service_template_id - self.service_template_name = service_template_name + self.order = order + # This parameter is required. + self.page_number = page_number + # This parameter is required. 
+ self.page_size = page_size + self.sort_by = sort_by + self.time_step = time_step + self.user_id = user_id + self.workspace_id = workspace_id def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -15258,54 +14191,59 @@ def to_map(self): return _map result = dict() - if self.inference_spec is not None: - result['InferenceSpec'] = self.inference_spec - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.service_template_description is not None: - result['ServiceTemplateDescription'] = self.service_template_description - if self.service_template_doc is not None: - result['ServiceTemplateDoc'] = self.service_template_doc - if self.service_template_id is not None: - result['ServiceTemplateId'] = self.service_template_id - if self.service_template_name is not None: - result['ServiceTemplateName'] = self.service_template_name + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.time_step is not None: + result['TimeStep'] = self.time_step + if self.user_id is not None: + result['UserId'] = self.user_id + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('InferenceSpec') is not None: - self.inference_spec = m.get('InferenceSpec') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels() - self.labels.append(temp_model.from_map(k)) - if m.get('ServiceTemplateDescription') is not None: - self.service_template_description = m.get('ServiceTemplateDescription') - if 
m.get('ServiceTemplateDoc') is not None: - self.service_template_doc = m.get('ServiceTemplateDoc') - if m.get('ServiceTemplateId') is not None: - self.service_template_id = m.get('ServiceTemplateId') - if m.get('ServiceTemplateName') is not None: - self.service_template_name = m.get('ServiceTemplateName') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('TimeStep') is not None: + self.time_step = m.get('TimeStep') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListAI4DServiceTemplatesResponseBody(TeaModel): +class GetUserViewMetricsResponseBody(TeaModel): def __init__( self, - request_id: str = None, - service_templates: List[ListAI4DServiceTemplatesResponseBodyServiceTemplates] = None, + resource_group_id: str = None, + summary: UserViewMetric = None, + total: int = None, + user_metrics: List[UserViewMetric] = None, ): - self.request_id = request_id - self.service_templates = service_templates + self.resource_group_id = resource_group_id + self.summary = summary + self.total = total + self.user_metrics = user_metrics def validate(self): - if self.service_templates: - for k in self.service_templates: + if self.summary: + self.summary.validate() + if self.user_metrics: + for k in self.user_metrics: if k: k.validate() @@ -15315,32 +14253,41 @@ def to_map(self): return _map result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - result['ServiceTemplates'] = [] - if self.service_templates is not None: - for k in self.service_templates: - result['ServiceTemplates'].append(k.to_map() if k else None) + if self.resource_group_id is not None: + result['ResourceGroupId'] = 
self.resource_group_id + if self.summary is not None: + result['Summary'] = self.summary.to_map() + if self.total is not None: + result['Total'] = self.total + result['UserMetrics'] = [] + if self.user_metrics is not None: + for k in self.user_metrics: + result['UserMetrics'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - self.service_templates = [] - if m.get('ServiceTemplates') is not None: - for k in m.get('ServiceTemplates'): - temp_model = ListAI4DServiceTemplatesResponseBodyServiceTemplates() - self.service_templates.append(temp_model.from_map(k)) + if m.get('ResourceGroupId') is not None: + self.resource_group_id = m.get('ResourceGroupId') + if m.get('Summary') is not None: + temp_model = UserViewMetric() + self.summary = temp_model.from_map(m['Summary']) + if m.get('Total') is not None: + self.total = m.get('Total') + self.user_metrics = [] + if m.get('UserMetrics') is not None: + for k in m.get('UserMetrics'): + temp_model = UserViewMetric() + self.user_metrics.append(temp_model.from_map(k)) return self -class ListAI4DServiceTemplatesResponse(TeaModel): +class GetUserViewMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListAI4DServiceTemplatesResponseBody = None, + body: GetUserViewMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -15371,19 +14318,20 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListAI4DServiceTemplatesResponseBody() + temp_model = GetUserViewMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListAlgorithmVersionsRequest(TeaModel): +class ListAI4DSerivcesRequest(TeaModel): def __init__( self, - page_number: int = None, - page_size: int = None, + service_type: str = 
None, + workspace_id: str = None, ): - self.page_number = page_number - self.page_size = page_size + self.service_type = service_type + # This parameter is required. + self.workspace_id = workspace_id def validate(self): pass @@ -15394,41 +14342,29 @@ def to_map(self): return _map result = dict() - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size + if self.service_type is not None: + result['ServiceType'] = self.service_type + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') + if m.get('ServiceType') is not None: + self.service_type = m.get('ServiceType') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListAlgorithmVersionsResponseBodyAlgorithmVersions(TeaModel): +class ListAI4DSerivcesResponseBodyServices(TeaModel): def __init__( self, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - algorithm_version: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - tenant_id: str = None, - user_id: str = None, + service_name: str = None, + service_type: str = None, ): - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.algorithm_version = algorithm_version - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.tenant_id = tenant_id - self.user_id = user_id + self.service_name = service_name + self.service_type = service_type def validate(self): pass @@ -15439,59 +14375,33 @@ def to_map(self): return _map result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if 
self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.algorithm_version is not None: - result['AlgorithmVersion'] = self.algorithm_version - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id - if self.user_id is not None: - result['UserId'] = self.user_id + if self.service_name is not None: + result['ServiceName'] = self.service_name + if self.service_type is not None: + result['ServiceType'] = self.service_type return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('AlgorithmVersion') is not None: - self.algorithm_version = m.get('AlgorithmVersion') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') + if m.get('ServiceName') is not None: + self.service_name = m.get('ServiceName') + if m.get('ServiceType') is not None: + self.service_type = m.get('ServiceType') return self -class ListAlgorithmVersionsResponseBody(TeaModel): +class ListAI4DSerivcesResponseBody(TeaModel): def __init__( self, - algorithm_versions: List[ListAlgorithmVersionsResponseBodyAlgorithmVersions] = None, request_id: str = None, - total_count: int = None, + services: List[ListAI4DSerivcesResponseBodyServices] = 
None, ): - self.algorithm_versions = algorithm_versions self.request_id = request_id - self.total_count = total_count + self.services = services def validate(self): - if self.algorithm_versions: - for k in self.algorithm_versions: + if self.services: + for k in self.services: if k: k.validate() @@ -15501,36 +14411,32 @@ def to_map(self): return _map result = dict() - result['AlgorithmVersions'] = [] - if self.algorithm_versions is not None: - for k in self.algorithm_versions: - result['AlgorithmVersions'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + result['Services'] = [] + if self.services is not None: + for k in self.services: + result['Services'].append(k.to_map() if k else None) return result - - def from_map(self, m: dict = None): - m = m or dict() - self.algorithm_versions = [] - if m.get('AlgorithmVersions') is not None: - for k in m.get('AlgorithmVersions'): - temp_model = ListAlgorithmVersionsResponseBodyAlgorithmVersions() - self.algorithm_versions.append(temp_model.from_map(k)) + + def from_map(self, m: dict = None): + m = m or dict() if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + self.services = [] + if m.get('Services') is not None: + for k in m.get('Services'): + temp_model = ListAI4DSerivcesResponseBodyServices() + self.services.append(temp_model.from_map(k)) return self -class ListAlgorithmVersionsResponse(TeaModel): +class ListAI4DSerivcesResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListAlgorithmVersionsResponseBody = None, + body: ListAI4DSerivcesResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -15561,26 +14467,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = 
m.get('statusCode') if m.get('body') is not None: - temp_model = ListAlgorithmVersionsResponseBody() + temp_model = ListAI4DSerivcesResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListAlgorithmsRequest(TeaModel): +class ListAI4DServiceTemplatesRequest(TeaModel): def __init__( self, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - page_number: int = None, - page_size: int = None, + service_type: str = None, workspace_id: str = None, ): - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.page_number = page_number - self.page_size = page_size + self.service_type = service_type + # This parameter is required. self.workspace_id = workspace_id def validate(self): @@ -15592,59 +14491,29 @@ def to_map(self): return _map result = dict() - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size + if self.service_type is not None: + result['ServiceType'] = self.service_type if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') + if m.get('ServiceType') is not None: + 
self.service_type = m.get('ServiceType') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class ListAlgorithmsResponseBodyAlgorithms(TeaModel): +class ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels(TeaModel): def __init__( self, - algorithm_description: str = None, - algorithm_id: str = None, - algorithm_name: str = None, - algorithm_provider: str = None, - display_name: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - user_id: str = None, - workspace_id: str = None, + key: str = None, + value: str = None, ): - self.algorithm_description = algorithm_description - self.algorithm_id = algorithm_id - self.algorithm_name = algorithm_name - self.algorithm_provider = algorithm_provider - self.display_name = display_name - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.user_id = user_id - self.workspace_id = workspace_id + self.key = key + self.value = value def validate(self): pass @@ -15655,63 +14524,98 @@ def to_map(self): return _map result = dict() - if self.algorithm_description is not None: - result['AlgorithmDescription'] = self.algorithm_description - if self.algorithm_id is not None: - result['AlgorithmId'] = self.algorithm_id - if self.algorithm_name is not None: - result['AlgorithmName'] = self.algorithm_name - if self.algorithm_provider is not None: - result['AlgorithmProvider'] = self.algorithm_provider - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.user_id is not None: - result['UserId'] = self.user_id - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.key is not None: + result['Key'] = self.key + if self.value is not None: + result['Value'] = self.value return result def 
from_map(self, m: dict = None): m = m or dict() - if m.get('AlgorithmDescription') is not None: - self.algorithm_description = m.get('AlgorithmDescription') - if m.get('AlgorithmId') is not None: - self.algorithm_id = m.get('AlgorithmId') - if m.get('AlgorithmName') is not None: - self.algorithm_name = m.get('AlgorithmName') - if m.get('AlgorithmProvider') is not None: - self.algorithm_provider = m.get('AlgorithmProvider') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') + if m.get('Key') is not None: + self.key = m.get('Key') + if m.get('Value') is not None: + self.value = m.get('Value') return self -class ListAlgorithmsResponseBody(TeaModel): +class ListAI4DServiceTemplatesResponseBodyServiceTemplates(TeaModel): + def __init__( + self, + inference_spec: Dict[str, Any] = None, + labels: List[ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels] = None, + service_template_description: str = None, + service_template_doc: str = None, + service_template_id: str = None, + service_template_name: str = None, + ): + self.inference_spec = inference_spec + self.labels = labels + self.service_template_description = service_template_description + self.service_template_doc = service_template_doc + self.service_template_id = service_template_id + self.service_template_name = service_template_name + + def validate(self): + if self.labels: + for k in self.labels: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.inference_spec is not None: + result['InferenceSpec'] = self.inference_spec + result['Labels'] = [] + if self.labels 
is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.service_template_description is not None: + result['ServiceTemplateDescription'] = self.service_template_description + if self.service_template_doc is not None: + result['ServiceTemplateDoc'] = self.service_template_doc + if self.service_template_id is not None: + result['ServiceTemplateId'] = self.service_template_id + if self.service_template_name is not None: + result['ServiceTemplateName'] = self.service_template_name + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('InferenceSpec') is not None: + self.inference_spec = m.get('InferenceSpec') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = ListAI4DServiceTemplatesResponseBodyServiceTemplatesLabels() + self.labels.append(temp_model.from_map(k)) + if m.get('ServiceTemplateDescription') is not None: + self.service_template_description = m.get('ServiceTemplateDescription') + if m.get('ServiceTemplateDoc') is not None: + self.service_template_doc = m.get('ServiceTemplateDoc') + if m.get('ServiceTemplateId') is not None: + self.service_template_id = m.get('ServiceTemplateId') + if m.get('ServiceTemplateName') is not None: + self.service_template_name = m.get('ServiceTemplateName') + return self + + +class ListAI4DServiceTemplatesResponseBody(TeaModel): def __init__( self, - algorithms: List[ListAlgorithmsResponseBodyAlgorithms] = None, request_id: str = None, - total_count: int = None, + service_templates: List[ListAI4DServiceTemplatesResponseBodyServiceTemplates] = None, ): - self.algorithms = algorithms self.request_id = request_id - self.total_count = total_count + self.service_templates = service_templates def validate(self): - if self.algorithms: - for k in self.algorithms: + if self.service_templates: + for k in self.service_templates: if k: k.validate() @@ -15721,36 +14625,32 @@ def to_map(self): return _map result = dict() - 
result['Algorithms'] = [] - if self.algorithms is not None: - for k in self.algorithms: - result['Algorithms'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + result['ServiceTemplates'] = [] + if self.service_templates is not None: + for k in self.service_templates: + result['ServiceTemplates'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - self.algorithms = [] - if m.get('Algorithms') is not None: - for k in m.get('Algorithms'): - temp_model = ListAlgorithmsResponseBodyAlgorithms() - self.algorithms.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + self.service_templates = [] + if m.get('ServiceTemplates') is not None: + for k in m.get('ServiceTemplates'): + temp_model = ListAI4DServiceTemplatesResponseBodyServiceTemplates() + self.service_templates.append(temp_model.from_map(k)) return self -class ListAlgorithmsResponse(TeaModel): +class ListAI4DServiceTemplatesResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListAlgorithmsResponseBody = None, + body: ListAI4DServiceTemplatesResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -15781,29 +14681,19 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListAlgorithmsResponseBody() + temp_model = ListAI4DServiceTemplatesResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListComponentVersionSnapshotsRequest(TeaModel): +class ListAlgorithmVersionsRequest(TeaModel): def __init__( self, - component_id: str = None, - order: str = None, page_number: int = None, page_size: int = None, - 
snapshot_id: str = None, - sort_by: str = None, - version: str = None, ): - self.component_id = component_id - self.order = order self.page_number = page_number self.page_size = page_size - self.snapshot_id = snapshot_id - self.sort_by = sort_by - self.version = version def validate(self): pass @@ -15814,61 +14704,41 @@ def to_map(self): return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id - if self.order is not None: - result['Order'] = self.order if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.version is not None: - result['Version'] = self.version return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') - if m.get('Order') is not None: - self.order = m.get('Order') + + def from_map(self, m: dict = None): + m = m or dict() if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: self.page_size = m.get('PageSize') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('Version') is not None: - self.version = m.get('Version') return self -class ListComponentVersionSnapshotsResponseBodySnapshots(TeaModel): +class ListAlgorithmVersionsResponseBodyAlgorithmVersions(TeaModel): def __init__( self, - component_id: str = None, - description: str = None, - is_current_version: bool = None, - snapshot_id: str = None, + algorithm_id: str = None, + algorithm_name: str = None, + algorithm_provider: str = None, + algorithm_version: str = None, + gmt_create_time: str = None, + gmt_modified_time: str = None, tenant_id: str = None, user_id: 
str = None, - version: str = None, - workspace_id: str = None, ): - self.component_id = component_id - self.description = description - self.is_current_version = is_current_version - self.snapshot_id = snapshot_id + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.algorithm_version = algorithm_version + self.gmt_create_time = gmt_create_time + self.gmt_modified_time = gmt_modified_time self.tenant_id = tenant_id self.user_id = user_id - self.version = version - self.workspace_id = workspace_id def validate(self): pass @@ -15879,59 +14749,59 @@ def to_map(self): return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id - if self.description is not None: - result['Description'] = self.description - if self.is_current_version is not None: - result['IsCurrentVersion'] = self.is_current_version - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.algorithm_version is not None: + result['AlgorithmVersion'] = self.algorithm_version + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time if self.tenant_id is not None: result['TenantId'] = self.tenant_id if self.user_id is not None: result['UserId'] = self.user_id - if self.version is not None: - result['Version'] = self.version - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') - if m.get('Description') is 
not None: - self.description = m.get('Description') - if m.get('IsCurrentVersion') is not None: - self.is_current_version = m.get('IsCurrentVersion') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('AlgorithmVersion') is not None: + self.algorithm_version = m.get('AlgorithmVersion') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') if m.get('TenantId') is not None: self.tenant_id = m.get('TenantId') if m.get('UserId') is not None: self.user_id = m.get('UserId') - if m.get('Version') is not None: - self.version = m.get('Version') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') return self -class ListComponentVersionSnapshotsResponseBody(TeaModel): +class ListAlgorithmVersionsResponseBody(TeaModel): def __init__( self, + algorithm_versions: List[ListAlgorithmVersionsResponseBodyAlgorithmVersions] = None, request_id: str = None, - snapshots: List[ListComponentVersionSnapshotsResponseBodySnapshots] = None, total_count: int = None, ): + self.algorithm_versions = algorithm_versions self.request_id = request_id - self.snapshots = snapshots self.total_count = total_count def validate(self): - if self.snapshots: - for k in self.snapshots: + if self.algorithm_versions: + for k in self.algorithm_versions: if k: k.validate() @@ -15941,36 +14811,36 @@ def to_map(self): return _map result = dict() + result['AlgorithmVersions'] = [] + if self.algorithm_versions is not None: + for k in self.algorithm_versions: + result['AlgorithmVersions'].append(k.to_map() if k else None) if self.request_id is not None: 
result['RequestId'] = self.request_id - result['Snapshots'] = [] - if self.snapshots is not None: - for k in self.snapshots: - result['Snapshots'].append(k.to_map() if k else None) if self.total_count is not None: result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() + self.algorithm_versions = [] + if m.get('AlgorithmVersions') is not None: + for k in m.get('AlgorithmVersions'): + temp_model = ListAlgorithmVersionsResponseBodyAlgorithmVersions() + self.algorithm_versions.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') - self.snapshots = [] - if m.get('Snapshots') is not None: - for k in m.get('Snapshots'): - temp_model = ListComponentVersionSnapshotsResponseBodySnapshots() - self.snapshots.append(temp_model.from_map(k)) if m.get('TotalCount') is not None: self.total_count = m.get('TotalCount') return self -class ListComponentVersionSnapshotsResponse(TeaModel): +class ListAlgorithmVersionsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListComponentVersionSnapshotsResponseBody = None, + body: ListAlgorithmVersionsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -16001,84 +14871,27 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListComponentVersionSnapshotsResponseBody() + temp_model = ListAlgorithmVersionsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListComponentVersionsRequest(TeaModel): - def __init__( - self, - labels: Dict[str, str] = None, - order: str = None, - page_number: int = None, - page_size: int = None, - sort_by: str = None, - version: str = None, - ): - self.labels = labels - self.order = order - self.page_number = page_number - self.page_size = page_size - self.sort_by = sort_by - self.version = version - - def 
validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.labels is not None: - result['Labels'] = self.labels - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.version is not None: - result['Version'] = self.version - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Labels') is not None: - self.labels = m.get('Labels') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('Version') is not None: - self.version = m.get('Version') - return self - - -class ListComponentVersionsShrinkRequest(TeaModel): +class ListAlgorithmsRequest(TeaModel): def __init__( self, - labels_shrink: str = None, - order: str = None, + algorithm_id: str = None, + algorithm_name: str = None, + algorithm_provider: str = None, page_number: int = None, page_size: int = None, - sort_by: str = None, - version: str = None, + workspace_id: str = None, ): - self.labels_shrink = labels_shrink - self.order = order + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider self.page_number = page_number self.page_size = page_size - self.sort_by = sort_by - self.version = version + self.workspace_id = workspace_id def validate(self): pass @@ -16089,146 +14902,126 @@ def to_map(self): return _map result = dict() - if self.labels_shrink is not None: - result['Labels'] = self.labels_shrink - if self.order is not None: - result['Order'] = self.order + if self.algorithm_id 
is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.version is not None: - result['Version'] = self.version + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Labels') is not None: - self.labels_shrink = m.get('Labels') - if m.get('Order') is not None: - self.order = m.get('Order') + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: self.page_size = m.get('PageSize') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('Version') is not None: - self.version = m.get('Version') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListComponentVersionsResponseBodyComponentVersions(TeaModel): +class ListAlgorithmsResponseBodyAlgorithms(TeaModel): def __init__( self, - component_id: str = None, + algorithm_description: str = None, + algorithm_id: str = None, + algorithm_name: str = None, + algorithm_provider: str = None, + display_name: str = None, gmt_create_time: str = None, gmt_modified_time: str = None, - labels: List[Label] = None, - name: str = None, - provider: str = None, - status: str = None, - tenant_id: str = None, user_id: str = None, - version: str = None, 
workspace_id: str = None, ): - self.component_id = component_id + self.algorithm_description = algorithm_description + self.algorithm_id = algorithm_id + self.algorithm_name = algorithm_name + self.algorithm_provider = algorithm_provider + self.display_name = display_name self.gmt_create_time = gmt_create_time self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.name = name - self.provider = provider - self.status = status - self.tenant_id = tenant_id self.user_id = user_id - self.version = version self.workspace_id = workspace_id - def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - + def validate(self): + pass + def to_map(self): _map = super().to_map() if _map is not None: return _map result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id + if self.algorithm_description is not None: + result['AlgorithmDescription'] = self.algorithm_description + if self.algorithm_id is not None: + result['AlgorithmId'] = self.algorithm_id + if self.algorithm_name is not None: + result['AlgorithmName'] = self.algorithm_name + if self.algorithm_provider is not None: + result['AlgorithmProvider'] = self.algorithm_provider + if self.display_name is not None: + result['DisplayName'] = self.display_name if self.gmt_create_time is not None: result['GmtCreateTime'] = self.gmt_create_time if self.gmt_modified_time is not None: result['GmtModifiedTime'] = self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.name is not None: - result['Name'] = self.name - if self.provider is not None: - result['Provider'] = self.provider - if self.status is not None: - result['Status'] = self.status - if self.tenant_id is not None: - result['TenantId'] = self.tenant_id if self.user_id is not None: result['UserId'] = self.user_id - if self.version is not None: - result['Version'] = self.version if 
self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') + if m.get('AlgorithmDescription') is not None: + self.algorithm_description = m.get('AlgorithmDescription') + if m.get('AlgorithmId') is not None: + self.algorithm_id = m.get('AlgorithmId') + if m.get('AlgorithmName') is not None: + self.algorithm_name = m.get('AlgorithmName') + if m.get('AlgorithmProvider') is not None: + self.algorithm_provider = m.get('AlgorithmProvider') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') if m.get('GmtCreateTime') is not None: self.gmt_create_time = m.get('GmtCreateTime') if m.get('GmtModifiedTime') is not None: self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Provider') is not None: - self.provider = m.get('Provider') - if m.get('Status') is not None: - self.status = m.get('Status') - if m.get('TenantId') is not None: - self.tenant_id = m.get('TenantId') if m.get('UserId') is not None: self.user_id = m.get('UserId') - if m.get('Version') is not None: - self.version = m.get('Version') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class ListComponentVersionsResponseBody(TeaModel): +class ListAlgorithmsResponseBody(TeaModel): def __init__( self, - component_versions: List[ListComponentVersionsResponseBodyComponentVersions] = None, + algorithms: List[ListAlgorithmsResponseBodyAlgorithms] = None, request_id: str = None, total_count: int = None, ): - self.component_versions = component_versions + self.algorithms = algorithms self.request_id = request_id self.total_count = total_count def validate(self): - if 
self.component_versions: - for k in self.component_versions: + if self.algorithms: + for k in self.algorithms: if k: k.validate() @@ -16238,10 +15031,10 @@ def to_map(self): return _map result = dict() - result['ComponentVersions'] = [] - if self.component_versions is not None: - for k in self.component_versions: - result['ComponentVersions'].append(k.to_map() if k else None) + result['Algorithms'] = [] + if self.algorithms is not None: + for k in self.algorithms: + result['Algorithms'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id if self.total_count is not None: @@ -16250,11 +15043,11 @@ def to_map(self): def from_map(self, m: dict = None): m = m or dict() - self.component_versions = [] - if m.get('ComponentVersions') is not None: - for k in m.get('ComponentVersions'): - temp_model = ListComponentVersionsResponseBodyComponentVersions() - self.component_versions.append(temp_model.from_map(k)) + self.algorithms = [] + if m.get('Algorithms') is not None: + for k in m.get('Algorithms'): + temp_model = ListAlgorithmsResponseBodyAlgorithms() + self.algorithms.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') if m.get('TotalCount') is not None: @@ -16262,12 +15055,12 @@ def from_map(self, m: dict = None): return self -class ListComponentVersionsResponse(TeaModel): +class ListAlgorithmsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListComponentVersionsResponseBody = None, + body: ListAlgorithmsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -16298,116 +15091,31 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListComponentVersionsResponseBody() + temp_model = ListAlgorithmsResponseBody() self.body = temp_model.from_map(m['body']) return self -class 
ListComponentsRequest(TeaModel): - def __init__( - self, - component_id: str = None, - component_ids: str = None, - labels: Dict[str, Any] = None, - name: str = None, - order: str = None, - page_number: int = None, - page_size: int = None, - provider: str = None, - sort_by: str = None, - workspace_id: str = None, - ): - self.component_id = component_id - self.component_ids = component_ids - self.labels = labels - self.name = name - self.order = order - self.page_number = page_number - self.page_size = page_size - self.provider = provider - self.sort_by = sort_by - self.workspace_id = workspace_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.component_id is not None: - result['ComponentId'] = self.component_id - if self.component_ids is not None: - result['ComponentIds'] = self.component_ids - if self.labels is not None: - result['Labels'] = self.labels - if self.name is not None: - result['Name'] = self.name - if self.order is not None: - result['Order'] = self.order - if self.page_number is not None: - result['PageNumber'] = self.page_number - if self.page_size is not None: - result['PageSize'] = self.page_size - if self.provider is not None: - result['Provider'] = self.provider - if self.sort_by is not None: - result['SortBy'] = self.sort_by - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ComponentId') is not None: - self.component_id = m.get('ComponentId') - if m.get('ComponentIds') is not None: - self.component_ids = m.get('ComponentIds') - if m.get('Labels') is not None: - self.labels = m.get('Labels') - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Order') is not None: - self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - 
self.page_size = m.get('PageSize') - if m.get('Provider') is not None: - self.provider = m.get('Provider') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class ListComponentsShrinkRequest(TeaModel): +class ListComponentVersionSnapshotsRequest(TeaModel): def __init__( self, component_id: str = None, - component_ids: str = None, - labels_shrink: str = None, - name: str = None, order: str = None, page_number: int = None, page_size: int = None, - provider: str = None, + snapshot_id: str = None, sort_by: str = None, - workspace_id: str = None, + version: str = None, ): + # This parameter is required. self.component_id = component_id - self.component_ids = component_ids - self.labels_shrink = labels_shrink - self.name = name self.order = order self.page_number = page_number self.page_size = page_size - self.provider = provider + self.snapshot_id = snapshot_id self.sort_by = sort_by - self.workspace_id = workspace_id + # This parameter is required. 
+ self.version = version def validate(self): pass @@ -16420,134 +15128,62 @@ def to_map(self): result = dict() if self.component_id is not None: result['ComponentId'] = self.component_id - if self.component_ids is not None: - result['ComponentIds'] = self.component_ids - if self.labels_shrink is not None: - result['Labels'] = self.labels_shrink - if self.name is not None: - result['Name'] = self.name if self.order is not None: result['Order'] = self.order if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size - if self.provider is not None: - result['Provider'] = self.provider + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id if self.sort_by is not None: result['SortBy'] = self.sort_by - if self.workspace_id is not None: - result['WorkspaceId'] = self.workspace_id + if self.version is not None: + result['Version'] = self.version return result def from_map(self, m: dict = None): m = m or dict() if m.get('ComponentId') is not None: self.component_id = m.get('ComponentId') - if m.get('ComponentIds') is not None: - self.component_ids = m.get('ComponentIds') - if m.get('Labels') is not None: - self.labels_shrink = m.get('Labels') - if m.get('Name') is not None: - self.name = m.get('Name') if m.get('Order') is not None: self.order = m.get('Order') - if m.get('PageNumber') is not None: - self.page_number = m.get('PageNumber') - if m.get('PageSize') is not None: - self.page_size = m.get('PageSize') - if m.get('Provider') is not None: - self.provider = m.get('Provider') - if m.get('SortBy') is not None: - self.sort_by = m.get('SortBy') - if m.get('WorkspaceId') is not None: - self.workspace_id = m.get('WorkspaceId') - return self - - -class ListComponentsResponseBodyComponentsVersions(TeaModel): - def __init__( - self, - gmt_create_time: str = None, - snapshot_id: str = None, - status: str = None, - version: str = None, - ): - self.gmt_create_time = 
gmt_create_time - self.snapshot_id = snapshot_id - self.status = status - self.version = version - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.status is not None: - result['Status'] = self.status - if self.version is not None: - result['Version'] = self.version - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') if m.get('SnapshotId') is not None: self.snapshot_id = m.get('SnapshotId') - if m.get('Status') is not None: - self.status = m.get('Status') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') if m.get('Version') is not None: self.version = m.get('Version') return self -class ListComponentsResponseBodyComponents(TeaModel): +class ListComponentVersionSnapshotsResponseBodySnapshots(TeaModel): def __init__( self, component_id: str = None, description: str = None, - display_name: str = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - labels: List[Label] = None, - name: str = None, - provider: str = None, + is_current_version: bool = None, + snapshot_id: str = None, tenant_id: str = None, user_id: str = None, - versions: List[ListComponentsResponseBodyComponentsVersions] = None, + version: str = None, workspace_id: str = None, ): self.component_id = component_id self.description = description - self.display_name = display_name - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.labels = labels - self.name = name - self.provider = provider + self.is_current_version = 
is_current_version + self.snapshot_id = snapshot_id self.tenant_id = tenant_id self.user_id = user_id - self.versions = versions + self.version = version self.workspace_id = workspace_id def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.versions: - for k in self.versions: - if k: - k.validate() + pass def to_map(self): _map = super().to_map() @@ -16559,28 +15195,16 @@ def to_map(self): result['ComponentId'] = self.component_id if self.description is not None: result['Description'] = self.description - if self.display_name is not None: - result['DisplayName'] = self.display_name - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.name is not None: - result['Name'] = self.name - if self.provider is not None: - result['Provider'] = self.provider + if self.is_current_version is not None: + result['IsCurrentVersion'] = self.is_current_version + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id if self.tenant_id is not None: result['TenantId'] = self.tenant_id if self.user_id is not None: result['UserId'] = self.user_id - result['Versions'] = [] - if self.versions is not None: - for k in self.versions: - result['Versions'].append(k.to_map() if k else None) + if self.version is not None: + result['Version'] = self.version if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result @@ -16591,49 +15215,35 @@ def from_map(self, m: dict = None): self.component_id = m.get('ComponentId') if m.get('Description') is not None: self.description = m.get('Description') - if m.get('DisplayName') is not None: - self.display_name = m.get('DisplayName') - if m.get('GmtCreateTime') is not None: - self.gmt_create_time = 
m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = Label() - self.labels.append(temp_model.from_map(k)) - if m.get('Name') is not None: - self.name = m.get('Name') - if m.get('Provider') is not None: - self.provider = m.get('Provider') + if m.get('IsCurrentVersion') is not None: + self.is_current_version = m.get('IsCurrentVersion') + if m.get('SnapshotId') is not None: + self.snapshot_id = m.get('SnapshotId') if m.get('TenantId') is not None: self.tenant_id = m.get('TenantId') if m.get('UserId') is not None: self.user_id = m.get('UserId') - self.versions = [] - if m.get('Versions') is not None: - for k in m.get('Versions'): - temp_model = ListComponentsResponseBodyComponentsVersions() - self.versions.append(temp_model.from_map(k)) + if m.get('Version') is not None: + self.version = m.get('Version') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class ListComponentsResponseBody(TeaModel): +class ListComponentVersionSnapshotsResponseBody(TeaModel): def __init__( self, - components: List[ListComponentsResponseBodyComponents] = None, request_id: str = None, + snapshots: List[ListComponentVersionSnapshotsResponseBodySnapshots] = None, total_count: int = None, ): - self.components = components self.request_id = request_id + self.snapshots = snapshots self.total_count = total_count def validate(self): - if self.components: - for k in self.components: + if self.snapshots: + for k in self.snapshots: if k: k.validate() @@ -16643,36 +15253,36 @@ def to_map(self): return _map result = dict() - result['Components'] = [] - if self.components is not None: - for k in self.components: - result['Components'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id + result['Snapshots'] = [] + if self.snapshots is not None: + for k in 
self.snapshots: + result['Snapshots'].append(k.to_map() if k else None) if self.total_count is not None: result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - self.components = [] - if m.get('Components') is not None: - for k in m.get('Components'): - temp_model = ListComponentsResponseBodyComponents() - self.components.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + self.snapshots = [] + if m.get('Snapshots') is not None: + for k in m.get('Snapshots'): + temp_model = ListComponentVersionSnapshotsResponseBodySnapshots() + self.snapshots.append(temp_model.from_map(k)) if m.get('TotalCount') is not None: self.total_count = m.get('TotalCount') return self -class ListComponentsResponse(TeaModel): +class ListComponentVersionSnapshotsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListComponentsResponseBody = None, + body: ListComponentVersionSnapshotsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -16703,27 +15313,27 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListComponentsResponseBody() + temp_model = ListComponentVersionSnapshotsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListInstanceJobsRequest(TeaModel): +class ListComponentVersionsRequest(TeaModel): def __init__( self, - instance_job_type: str = None, + labels: Dict[str, str] = None, order: str = None, page_number: int = None, page_size: int = None, sort_by: str = None, - status: str = None, + version: str = None, ): - self.instance_job_type = instance_job_type + self.labels = labels self.order = order self.page_number = page_number self.page_size = page_size self.sort_by = sort_by - self.status = status + self.version = version def validate(self): pass @@ -16734,8 
+15344,8 @@ def to_map(self): return _map result = dict() - if self.instance_job_type is not None: - result['InstanceJobType'] = self.instance_job_type + if self.labels is not None: + result['Labels'] = self.labels if self.order is not None: result['Order'] = self.order if self.page_number is not None: @@ -16744,14 +15354,14 @@ def to_map(self): result['PageSize'] = self.page_size if self.sort_by is not None: result['SortBy'] = self.sort_by - if self.status is not None: - result['Status'] = self.status + if self.version is not None: + result['Version'] = self.version return result def from_map(self, m: dict = None): m = m or dict() - if m.get('InstanceJobType') is not None: - self.instance_job_type = m.get('InstanceJobType') + if m.get('Labels') is not None: + self.labels = m.get('Labels') if m.get('Order') is not None: self.order = m.get('Order') if m.get('PageNumber') is not None: @@ -16760,100 +15370,181 @@ def from_map(self, m: dict = None): self.page_size = m.get('PageSize') if m.get('SortBy') is not None: self.sort_by = m.get('SortBy') - if m.get('Status') is not None: - self.status = m.get('Status') + if m.get('Version') is not None: + self.version = m.get('Version') return self -class ListInstanceJobsResponseBodyInstanceJobs(TeaModel): +class ListComponentVersionsShrinkRequest(TeaModel): def __init__( self, - creator: str = None, + labels_shrink: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, + version: str = None, + ): + self.labels_shrink = labels_shrink + self.order = order + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by + self.version = version + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.labels_shrink is not None: + result['Labels'] = self.labels_shrink + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + 
result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.version is not None: + result['Version'] = self.version + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('Labels') is not None: + self.labels_shrink = m.get('Labels') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('Version') is not None: + self.version = m.get('Version') + return self + + +class ListComponentVersionsResponseBodyComponentVersions(TeaModel): + def __init__( + self, + component_id: str = None, gmt_create_time: str = None, - instance_id: str = None, - instance_job_id: str = None, - instance_job_type: str = None, - reason_code: str = None, - reason_message: str = None, + gmt_modified_time: str = None, + labels: List[Label] = None, + name: str = None, + provider: str = None, status: str = None, + tenant_id: str = None, + user_id: str = None, + version: str = None, workspace_id: str = None, ): - self.creator = creator + self.component_id = component_id + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.gmt_create_time = gmt_create_time - self.instance_id = instance_id - self.instance_job_id = instance_job_id - self.instance_job_type = instance_job_type - self.reason_code = reason_code - self.reason_message = reason_message + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_modified_time = gmt_modified_time + self.labels = labels + self.name = name + self.provider = provider self.status = status + self.tenant_id = tenant_id + self.user_id = user_id + self.version = version self.workspace_id = workspace_id def validate(self): - pass + if self.labels: + for k in self.labels: + if k: + 
k.validate() def to_map(self): _map = super().to_map() if _map is not None: return _map - result = dict() - if self.creator is not None: - result['Creator'] = self.creator - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.instance_id is not None: - result['InstanceId'] = self.instance_id - if self.instance_job_id is not None: - result['InstanceJobId'] = self.instance_job_id - if self.instance_job_type is not None: - result['InstanceJobType'] = self.instance_job_type - if self.reason_code is not None: - result['ReasonCode'] = self.reason_code - if self.reason_message is not None: - result['ReasonMessage'] = self.reason_message + result = dict() + if self.component_id is not None: + result['ComponentId'] = self.component_id + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.gmt_modified_time is not None: + result['GmtModifiedTime'] = self.gmt_modified_time + result['Labels'] = [] + if self.labels is not None: + for k in self.labels: + result['Labels'].append(k.to_map() if k else None) + if self.name is not None: + result['Name'] = self.name + if self.provider is not None: + result['Provider'] = self.provider if self.status is not None: result['Status'] = self.status + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id + if self.user_id is not None: + result['UserId'] = self.user_id + if self.version is not None: + result['Version'] = self.version if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Creator') is not None: - self.creator = m.get('Creator') + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') if m.get('GmtCreateTime') is not None: self.gmt_create_time = m.get('GmtCreateTime') - if m.get('InstanceId') is not None: - self.instance_id = m.get('InstanceId') - if m.get('InstanceJobId') is not None: - 
self.instance_job_id = m.get('InstanceJobId') - if m.get('InstanceJobType') is not None: - self.instance_job_type = m.get('InstanceJobType') - if m.get('ReasonCode') is not None: - self.reason_code = m.get('ReasonCode') - if m.get('ReasonMessage') is not None: - self.reason_message = m.get('ReasonMessage') + if m.get('GmtModifiedTime') is not None: + self.gmt_modified_time = m.get('GmtModifiedTime') + self.labels = [] + if m.get('Labels') is not None: + for k in m.get('Labels'): + temp_model = Label() + self.labels.append(temp_model.from_map(k)) + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Provider') is not None: + self.provider = m.get('Provider') if m.get('Status') is not None: self.status = m.get('Status') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('Version') is not None: + self.version = m.get('Version') if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class ListInstanceJobsResponseBody(TeaModel): +class ListComponentVersionsResponseBody(TeaModel): def __init__( self, - instance_jobs: ListInstanceJobsResponseBodyInstanceJobs = None, + component_versions: List[ListComponentVersionsResponseBodyComponentVersions] = None, request_id: str = None, total_count: int = None, ): - self.instance_jobs = instance_jobs + self.component_versions = component_versions self.request_id = request_id self.total_count = total_count def validate(self): - if self.instance_jobs: - self.instance_jobs.validate() + if self.component_versions: + for k in self.component_versions: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -16861,8 +15552,10 @@ def to_map(self): return _map result = dict() - if self.instance_jobs is not None: - result['InstanceJobs'] = self.instance_jobs.to_map() + result['ComponentVersions'] = [] + if self.component_versions is not None: + for k in self.component_versions: + 
result['ComponentVersions'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id if self.total_count is not None: @@ -16871,9 +15564,11 @@ def to_map(self): def from_map(self, m: dict = None): m = m or dict() - if m.get('InstanceJobs') is not None: - temp_model = ListInstanceJobsResponseBodyInstanceJobs() - self.instance_jobs = temp_model.from_map(m['InstanceJobs']) + self.component_versions = [] + if m.get('ComponentVersions') is not None: + for k in m.get('ComponentVersions'): + temp_model = ListComponentVersionsResponseBodyComponentVersions() + self.component_versions.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') if m.get('TotalCount') is not None: @@ -16881,12 +15576,12 @@ def from_map(self, m: dict = None): return self -class ListInstanceJobsResponse(TeaModel): +class ListComponentVersionsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListInstanceJobsResponseBody = None, + body: ListComponentVersionsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -16917,25 +15612,33 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListInstanceJobsResponseBody() + temp_model = ListComponentVersionsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListLLMProjectsRequest(TeaModel): +class ListComponentsRequest(TeaModel): def __init__( self, + component_id: str = None, + component_ids: str = None, + labels: Dict[str, Any] = None, + name: str = None, order: str = None, page_number: int = None, page_size: int = None, - project_name: str = None, + provider: str = None, sort_by: str = None, workspace_id: str = None, ): + self.component_id = component_id + self.component_ids = component_ids + self.labels = labels + self.name = name self.order = order 
self.page_number = page_number self.page_size = page_size - self.project_name = project_name + self.provider = provider self.sort_by = sort_by self.workspace_id = workspace_id @@ -16948,14 +15651,22 @@ def to_map(self): return _map result = dict() + if self.component_id is not None: + result['ComponentId'] = self.component_id + if self.component_ids is not None: + result['ComponentIds'] = self.component_ids + if self.labels is not None: + result['Labels'] = self.labels + if self.name is not None: + result['Name'] = self.name if self.order is not None: result['Order'] = self.order if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size - if self.project_name is not None: - result['ProjectName'] = self.project_name + if self.provider is not None: + result['Provider'] = self.provider if self.sort_by is not None: result['SortBy'] = self.sort_by if self.workspace_id is not None: @@ -16964,14 +15675,22 @@ def to_map(self): def from_map(self, m: dict = None): m = m or dict() + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') + if m.get('ComponentIds') is not None: + self.component_ids = m.get('ComponentIds') + if m.get('Labels') is not None: + self.labels = m.get('Labels') + if m.get('Name') is not None: + self.name = m.get('Name') if m.get('Order') is not None: self.order = m.get('Order') if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: self.page_size = m.get('PageSize') - if m.get('ProjectName') is not None: - self.project_name = m.get('ProjectName') + if m.get('Provider') is not None: + self.provider = m.get('Provider') if m.get('SortBy') is not None: self.sort_by = m.get('SortBy') if m.get('WorkspaceId') is not None: @@ -16979,14 +15698,30 @@ def from_map(self, m: dict = None): return self -class ListLLMProjectsResponseBodyProjectsLabels(TeaModel): +class ListComponentsShrinkRequest(TeaModel): 
def __init__( self, - key: str = None, - value: str = None, + component_id: str = None, + component_ids: str = None, + labels_shrink: str = None, + name: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + provider: str = None, + sort_by: str = None, + workspace_id: str = None, ): - self.key = key - self.value = value + self.component_id = component_id + self.component_ids = component_ids + self.labels_shrink = labels_shrink + self.name = name + self.order = order + self.page_number = page_number + self.page_size = page_size + self.provider = provider + self.sort_by = sort_by + self.workspace_id = workspace_id def validate(self): pass @@ -16997,29 +15732,66 @@ def to_map(self): return _map result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value + if self.component_id is not None: + result['ComponentId'] = self.component_id + if self.component_ids is not None: + result['ComponentIds'] = self.component_ids + if self.labels_shrink is not None: + result['Labels'] = self.labels_shrink + if self.name is not None: + result['Name'] = self.name + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.provider is not None: + result['Provider'] = self.provider + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') + if m.get('ComponentIds') is not None: + self.component_ids = m.get('ComponentIds') + if m.get('Labels') is not None: + self.labels_shrink = 
m.get('Labels') + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('Provider') is not None: + self.provider = m.get('Provider') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListLLMProjectsResponseBodyProjectsRuntime(TeaModel): +class ListComponentsResponseBodyComponentsVersions(TeaModel): def __init__( self, - runtime_id: str = None, - runtime_type: str = None, + gmt_create_time: str = None, + snapshot_id: str = None, + status: str = None, + version: str = None, ): - self.runtime_id = runtime_id - self.runtime_type = runtime_type + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + self.snapshot_id = snapshot_id + self.status = status + self.version = version def validate(self): pass @@ -17030,48 +15802,58 @@ def to_map(self): return _map result = dict() - if self.runtime_id is not None: - result['RuntimeId'] = self.runtime_id - if self.runtime_type is not None: - result['RuntimeType'] = self.runtime_type + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.snapshot_id is not None: + result['SnapshotId'] = self.snapshot_id + if self.status is not None: + result['Status'] = self.status + if self.version is not None: + result['Version'] = self.version return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RuntimeId') is not None: - self.runtime_id = m.get('RuntimeId') - if m.get('RuntimeType') is not None: - self.runtime_type = m.get('RuntimeType') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('SnapshotId') is not None: + self.snapshot_id = m.get('SnapshotId') 
+ if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('Version') is not None: + self.version = m.get('Version') return self -class ListLLMProjectsResponseBodyProjects(TeaModel): +class ListComponentsResponseBodyComponents(TeaModel): def __init__( self, + component_id: str = None, + description: str = None, + display_name: str = None, gmt_create_time: str = None, gmt_modified_time: str = None, - labels: List[ListLLMProjectsResponseBodyProjectsLabels] = None, - owner_id: str = None, - project_description: str = None, - project_id: str = None, - project_name: str = None, - project_type: str = None, - root_path: str = None, - runtime: ListLLMProjectsResponseBodyProjectsRuntime = None, + labels: List[Label] = None, + name: str = None, + provider: str = None, + tenant_id: str = None, user_id: str = None, + versions: List[ListComponentsResponseBodyComponentsVersions] = None, workspace_id: str = None, ): + self.component_id = component_id + self.description = description + self.display_name = display_name + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.gmt_create_time = gmt_create_time + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.gmt_modified_time = gmt_modified_time self.labels = labels - self.owner_id = owner_id - self.project_description = project_description - self.project_id = project_id - self.project_name = project_name - self.project_type = project_type - self.root_path = root_path - self.runtime = runtime + self.name = name + self.provider = provider + self.tenant_id = tenant_id self.user_id = user_id + self.versions = versions self.workspace_id = workspace_id def validate(self): @@ -17079,8 +15861,10 @@ def validate(self): for k in self.labels: if k: k.validate() - if self.runtime: - self.runtime.validate() + if self.versions: + for k in self.versions: + if k: + k.validate() def to_map(self): _map = super().to_map() @@ -17088,6 +15872,12 @@ def to_map(self): return _map result = dict() + if self.component_id is not None: + 
result['ComponentId'] = self.component_id + if self.description is not None: + result['Description'] = self.description + if self.display_name is not None: + result['DisplayName'] = self.display_name if self.gmt_create_time is not None: result['GmtCreateTime'] = self.gmt_create_time if self.gmt_modified_time is not None: @@ -17096,28 +15886,30 @@ def to_map(self): if self.labels is not None: for k in self.labels: result['Labels'].append(k.to_map() if k else None) - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.project_description is not None: - result['ProjectDescription'] = self.project_description - if self.project_id is not None: - result['ProjectId'] = self.project_id - if self.project_name is not None: - result['ProjectName'] = self.project_name - if self.project_type is not None: - result['ProjectType'] = self.project_type - if self.root_path is not None: - result['RootPath'] = self.root_path - if self.runtime is not None: - result['Runtime'] = self.runtime.to_map() + if self.name is not None: + result['Name'] = self.name + if self.provider is not None: + result['Provider'] = self.provider + if self.tenant_id is not None: + result['TenantId'] = self.tenant_id if self.user_id is not None: result['UserId'] = self.user_id + result['Versions'] = [] + if self.versions is not None: + for k in self.versions: + result['Versions'].append(k.to_map() if k else None) if self.workspace_id is not None: result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() + if m.get('ComponentId') is not None: + self.component_id = m.get('ComponentId') + if m.get('Description') is not None: + self.description = m.get('Description') + if m.get('DisplayName') is not None: + self.display_name = m.get('DisplayName') if m.get('GmtCreateTime') is not None: self.gmt_create_time = m.get('GmtCreateTime') if m.get('GmtModifiedTime') is not None: @@ -17125,42 +15917,40 @@ def from_map(self, m: dict = None): 
self.labels = [] if m.get('Labels') is not None: for k in m.get('Labels'): - temp_model = ListLLMProjectsResponseBodyProjectsLabels() + temp_model = Label() self.labels.append(temp_model.from_map(k)) - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ProjectDescription') is not None: - self.project_description = m.get('ProjectDescription') - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') - if m.get('ProjectName') is not None: - self.project_name = m.get('ProjectName') - if m.get('ProjectType') is not None: - self.project_type = m.get('ProjectType') - if m.get('RootPath') is not None: - self.root_path = m.get('RootPath') - if m.get('Runtime') is not None: - temp_model = ListLLMProjectsResponseBodyProjectsRuntime() - self.runtime = temp_model.from_map(m['Runtime']) + if m.get('Name') is not None: + self.name = m.get('Name') + if m.get('Provider') is not None: + self.provider = m.get('Provider') + if m.get('TenantId') is not None: + self.tenant_id = m.get('TenantId') if m.get('UserId') is not None: self.user_id = m.get('UserId') + self.versions = [] + if m.get('Versions') is not None: + for k in m.get('Versions'): + temp_model = ListComponentsResponseBodyComponentsVersions() + self.versions.append(temp_model.from_map(k)) if m.get('WorkspaceId') is not None: self.workspace_id = m.get('WorkspaceId') return self -class ListLLMProjectsResponseBody(TeaModel): +class ListComponentsResponseBody(TeaModel): def __init__( self, - projects: List[ListLLMProjectsResponseBodyProjects] = None, + components: List[ListComponentsResponseBodyComponents] = None, request_id: str = None, + total_count: int = None, ): - self.projects = projects + self.components = components self.request_id = request_id + self.total_count = total_count def validate(self): - if self.projects: - for k in self.projects: + if self.components: + for k in self.components: if k: k.validate() @@ -17170,32 +15960,36 @@ def to_map(self): return _map result = 
dict() - result['Projects'] = [] - if self.projects is not None: - for k in self.projects: - result['Projects'].append(k.to_map() if k else None) + result['Components'] = [] + if self.components is not None: + for k in self.components: + result['Components'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - self.projects = [] - if m.get('Projects') is not None: - for k in m.get('Projects'): - temp_model = ListLLMProjectsResponseBodyProjects() - self.projects.append(temp_model.from_map(k)) + self.components = [] + if m.get('Components') is not None: + for k in m.get('Components'): + temp_model = ListComponentsResponseBodyComponents() + self.components.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return self -class ListLLMProjectsResponse(TeaModel): +class ListComponentsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListLLMProjectsResponseBody = None, + body: ListComponentsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -17226,23 +16020,27 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListLLMProjectsResponseBody() + temp_model = ListComponentsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListLLMSnapshotsRequest(TeaModel): +class ListInstanceJobsRequest(TeaModel): def __init__( self, + instance_job_type: str = None, order: str = None, page_number: int = None, page_size: int = None, sort_by: str = None, + status: str = None, ): + self.instance_job_type = instance_job_type self.order = order self.page_number = 
page_number self.page_size = page_size self.sort_by = sort_by + self.status = status def validate(self): pass @@ -17253,6 +16051,8 @@ def to_map(self): return _map result = dict() + if self.instance_job_type is not None: + result['InstanceJobType'] = self.instance_job_type if self.order is not None: result['Order'] = self.order if self.page_number is not None: @@ -17261,10 +16061,14 @@ def to_map(self): result['PageSize'] = self.page_size if self.sort_by is not None: result['SortBy'] = self.sort_by + if self.status is not None: + result['Status'] = self.status return result def from_map(self, m: dict = None): m = m or dict() + if m.get('InstanceJobType') is not None: + self.instance_job_type = m.get('InstanceJobType') if m.get('Order') is not None: self.order = m.get('Order') if m.get('PageNumber') is not None: @@ -17273,17 +16077,34 @@ def from_map(self, m: dict = None): self.page_size = m.get('PageSize') if m.get('SortBy') is not None: self.sort_by = m.get('SortBy') + if m.get('Status') is not None: + self.status = m.get('Status') return self -class ListLLMSnapshotsResponseBodySnapshotsContentStorage(TeaModel): +class ListInstanceJobsResponseBodyInstanceJobs(TeaModel): def __init__( self, - location: str = None, - type: str = None, + creator: str = None, + gmt_create_time: str = None, + instance_id: str = None, + instance_job_id: str = None, + instance_job_type: str = None, + reason_code: str = None, + reason_message: str = None, + status: str = None, + workspace_id: str = None, ): - self.location = location - self.type = type + self.creator = creator + # Use the UTC time format: yyyy-MM-ddTHH:mmZ + self.gmt_create_time = gmt_create_time + self.instance_id = instance_id + self.instance_job_id = instance_job_id + self.instance_job_type = instance_job_type + self.reason_code = reason_code + self.reason_message = reason_message + self.status = status + self.workspace_id = workspace_id def validate(self): pass @@ -17294,43 +16115,63 @@ def to_map(self): return _map 
result = dict() - if self.location is not None: - result['Location'] = self.location - if self.type is not None: - result['Type'] = self.type + if self.creator is not None: + result['Creator'] = self.creator + if self.gmt_create_time is not None: + result['GmtCreateTime'] = self.gmt_create_time + if self.instance_id is not None: + result['InstanceId'] = self.instance_id + if self.instance_job_id is not None: + result['InstanceJobId'] = self.instance_job_id + if self.instance_job_type is not None: + result['InstanceJobType'] = self.instance_job_type + if self.reason_code is not None: + result['ReasonCode'] = self.reason_code + if self.reason_message is not None: + result['ReasonMessage'] = self.reason_message + if self.status is not None: + result['Status'] = self.status + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Location') is not None: - self.location = m.get('Location') - if m.get('Type') is not None: - self.type = m.get('Type') + if m.get('Creator') is not None: + self.creator = m.get('Creator') + if m.get('GmtCreateTime') is not None: + self.gmt_create_time = m.get('GmtCreateTime') + if m.get('InstanceId') is not None: + self.instance_id = m.get('InstanceId') + if m.get('InstanceJobId') is not None: + self.instance_job_id = m.get('InstanceJobId') + if m.get('InstanceJobType') is not None: + self.instance_job_type = m.get('InstanceJobType') + if m.get('ReasonCode') is not None: + self.reason_code = m.get('ReasonCode') + if m.get('ReasonMessage') is not None: + self.reason_message = m.get('ReasonMessage') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListLLMSnapshotsResponseBodySnapshots(TeaModel): +class ListInstanceJobsResponseBody(TeaModel): def __init__( self, - content_storage: 
ListLLMSnapshotsResponseBodySnapshotsContentStorage = None, - gmt_create_time: str = None, - gmt_modified_time: str = None, - owner_id: str = None, - project_id: str = None, - snapshot_id: str = None, - user_id: str = None, + instance_jobs: ListInstanceJobsResponseBodyInstanceJobs = None, + request_id: str = None, + total_count: int = None, ): - self.content_storage = content_storage - self.gmt_create_time = gmt_create_time - self.gmt_modified_time = gmt_modified_time - self.owner_id = owner_id - self.project_id = project_id - self.snapshot_id = snapshot_id - self.user_id = user_id + self.instance_jobs = instance_jobs + self.request_id = request_id + self.total_count = total_count def validate(self): - if self.content_storage: - self.content_storage.validate() + if self.instance_jobs: + self.instance_jobs.validate() def to_map(self): _map = super().to_map() @@ -17338,56 +16179,133 @@ def to_map(self): return _map result = dict() - if self.content_storage is not None: - result['ContentStorage'] = self.content_storage.to_map() - if self.gmt_create_time is not None: - result['GmtCreateTime'] = self.gmt_create_time - if self.gmt_modified_time is not None: - result['GmtModifiedTime'] = self.gmt_modified_time - if self.owner_id is not None: - result['OwnerId'] = self.owner_id - if self.project_id is not None: - result['ProjectId'] = self.project_id - if self.snapshot_id is not None: - result['SnapshotId'] = self.snapshot_id - if self.user_id is not None: - result['UserId'] = self.user_id + if self.instance_jobs is not None: + result['InstanceJobs'] = self.instance_jobs.to_map() + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ContentStorage') is not None: - temp_model = ListLLMSnapshotsResponseBodySnapshotsContentStorage() - self.content_storage = temp_model.from_map(m['ContentStorage']) - if 
m.get('GmtCreateTime') is not None: - self.gmt_create_time = m.get('GmtCreateTime') - if m.get('GmtModifiedTime') is not None: - self.gmt_modified_time = m.get('GmtModifiedTime') - if m.get('OwnerId') is not None: - self.owner_id = m.get('OwnerId') - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') - if m.get('SnapshotId') is not None: - self.snapshot_id = m.get('SnapshotId') - if m.get('UserId') is not None: - self.user_id = m.get('UserId') + if m.get('InstanceJobs') is not None: + temp_model = ListInstanceJobsResponseBodyInstanceJobs() + self.instance_jobs = temp_model.from_map(m['InstanceJobs']) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return self -class ListLLMSnapshotsResponseBody(TeaModel): +class ListInstanceJobsResponse(TeaModel): def __init__( self, - request_id: str = None, - snapshots: List[ListLLMSnapshotsResponseBodySnapshots] = None, - total_count: int = None, + headers: Dict[str, str] = None, + status_code: int = None, + body: ListInstanceJobsResponseBody = None, ): - self.request_id = request_id - self.snapshots = snapshots - self.total_count = total_count + self.headers = headers + self.status_code = status_code + self.body = body def validate(self): - if self.snapshots: - for k in self.snapshots: + if self.body: + self.body.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = 
ListInstanceJobsResponseBody() + self.body = temp_model.from_map(m['body']) + return self + + +class ListNodeGPUMetricsRequest(TeaModel): + def __init__( + self, + end_time: str = None, + gputype: str = None, + metric_type: str = None, + node_type: str = None, + start_time: str = None, + ): + self.end_time = end_time + self.gputype = gputype + # This parameter is required. + self.metric_type = metric_type + self.node_type = node_type + self.start_time = start_time + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.end_time is not None: + result['EndTime'] = self.end_time + if self.gputype is not None: + result['GPUType'] = self.gputype + if self.metric_type is not None: + result['MetricType'] = self.metric_type + if self.node_type is not None: + result['NodeType'] = self.node_type + if self.start_time is not None: + result['StartTime'] = self.start_time + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('EndTime') is not None: + self.end_time = m.get('EndTime') + if m.get('GPUType') is not None: + self.gputype = m.get('GPUType') + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + if m.get('NodeType') is not None: + self.node_type = m.get('NodeType') + if m.get('StartTime') is not None: + self.start_time = m.get('StartTime') + return self + + +class ListNodeGPUMetricsResponseBody(TeaModel): + def __init__( + self, + metric_type: str = None, + node_gpumetrics: List[NodeGPUMetric] = None, + quota_id: str = None, + ): + self.metric_type = metric_type + self.node_gpumetrics = node_gpumetrics + self.quota_id = quota_id + + def validate(self): + if self.node_gpumetrics: + for k in self.node_gpumetrics: if k: k.validate() @@ -17397,36 +16315,36 @@ def to_map(self): return _map result = dict() - if self.request_id is not None: - result['RequestId'] = self.request_id - result['Snapshots'] = [] - if self.snapshots is not 
None: - for k in self.snapshots: - result['Snapshots'].append(k.to_map() if k else None) - if self.total_count is not None: - result['TotalCount'] = self.total_count + if self.metric_type is not None: + result['MetricType'] = self.metric_type + result['NodeGPUMetrics'] = [] + if self.node_gpumetrics is not None: + for k in self.node_gpumetrics: + result['NodeGPUMetrics'].append(k.to_map() if k else None) + if self.quota_id is not None: + result['QuotaId'] = self.quota_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - self.snapshots = [] - if m.get('Snapshots') is not None: - for k in m.get('Snapshots'): - temp_model = ListLLMSnapshotsResponseBodySnapshots() - self.snapshots.append(temp_model.from_map(k)) - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + if m.get('MetricType') is not None: + self.metric_type = m.get('MetricType') + self.node_gpumetrics = [] + if m.get('NodeGPUMetrics') is not None: + for k in m.get('NodeGPUMetrics'): + temp_model = NodeGPUMetric() + self.node_gpumetrics.append(temp_model.from_map(k)) + if m.get('QuotaId') is not None: + self.quota_id = m.get('QuotaId') return self -class ListLLMSnapshotsResponse(TeaModel): +class ListNodeGPUMetricsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListLLMSnapshotsResponseBody = None, + body: ListNodeGPUMetricsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -17457,7 +16375,7 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListLLMSnapshotsResponseBody() + temp_model = ListNodeGPUMetricsResponseBody() self.body = temp_model.from_map(m['body']) return self @@ -17467,6 +16385,7 @@ def __init__( self, resource_group_id: str = None, ): + # This parameter is required. 
self.resource_group_id = resource_group_id def validate(self): @@ -17723,7 +16642,10 @@ class ListNodesRequest(TeaModel): def __init__( self, accelerator_type: str = None, + filter_by_quota_id: str = None, + filter_by_resource_group_ids: str = None, gputype: str = None, + node_names: str = None, node_statuses: str = None, node_types: str = None, order: str = None, @@ -17736,7 +16658,10 @@ def __init__( verbose: bool = None, ): self.accelerator_type = accelerator_type + self.filter_by_quota_id = filter_by_quota_id + self.filter_by_resource_group_ids = filter_by_resource_group_ids self.gputype = gputype + self.node_names = node_names self.node_statuses = node_statuses self.node_types = node_types self.order = order @@ -17759,8 +16684,14 @@ def to_map(self): result = dict() if self.accelerator_type is not None: result['AcceleratorType'] = self.accelerator_type + if self.filter_by_quota_id is not None: + result['FilterByQuotaId'] = self.filter_by_quota_id + if self.filter_by_resource_group_ids is not None: + result['FilterByResourceGroupIds'] = self.filter_by_resource_group_ids if self.gputype is not None: result['GPUType'] = self.gputype + if self.node_names is not None: + result['NodeNames'] = self.node_names if self.node_statuses is not None: result['NodeStatuses'] = self.node_statuses if self.node_types is not None: @@ -17787,8 +16718,14 @@ def from_map(self, m: dict = None): m = m or dict() if m.get('AcceleratorType') is not None: self.accelerator_type = m.get('AcceleratorType') + if m.get('FilterByQuotaId') is not None: + self.filter_by_quota_id = m.get('FilterByQuotaId') + if m.get('FilterByResourceGroupIds') is not None: + self.filter_by_resource_group_ids = m.get('FilterByResourceGroupIds') if m.get('GPUType') is not None: self.gputype = m.get('GPUType') + if m.get('NodeNames') is not None: + self.node_names = m.get('NodeNames') if m.get('NodeStatuses') is not None: self.node_statuses = m.get('NodeStatuses') if m.get('NodeTypes') is not None: @@ -17812,20 
+16749,267 @@ def from_map(self, m: dict = None): return self -class ListNodesResponseBody(TeaModel): +class ListNodesResponseBody(TeaModel): + def __init__( + self, + nodes: List[Node] = None, + request_id: str = None, + total_count: int = None, + ): + self.nodes = nodes + self.request_id = request_id + self.total_count = total_count + + def validate(self): + if self.nodes: + for k in self.nodes: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + result['Nodes'] = [] + if self.nodes is not None: + for k in self.nodes: + result['Nodes'].append(k.to_map() if k else None) + if self.request_id is not None: + result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count + return result + + def from_map(self, m: dict = None): + m = m or dict() + self.nodes = [] + if m.get('Nodes') is not None: + for k in m.get('Nodes'): + temp_model = Node() + self.nodes.append(temp_model.from_map(k)) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') + return self + + +class ListNodesResponse(TeaModel): + def __init__( + self, + headers: Dict[str, str] = None, + status_code: int = None, + body: ListNodesResponseBody = None, + ): + self.headers = headers + self.status_code = status_code + self.body = body + + def validate(self): + if self.body: + self.body.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + 
self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = ListNodesResponseBody() + self.body = temp_model.from_map(m['body']) + return self + + +class ListOperationsRequest(TeaModel): + def __init__( + self, + object_id: str = None, + object_type: str = None, + operation_id: str = None, + operation_type: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + sort_by: str = None, + status: str = None, + ): + self.object_id = object_id + self.object_type = object_type + self.operation_id = operation_id + self.operation_type = operation_type + self.order = order + self.page_number = page_number + self.page_size = page_size + self.sort_by = sort_by + self.status = status + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.object_id is not None: + result['ObjectId'] = self.object_id + if self.object_type is not None: + result['ObjectType'] = self.object_type + if self.operation_id is not None: + result['OperationId'] = self.operation_id + if self.operation_type is not None: + result['OperationType'] = self.operation_type + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.status is not None: + result['Status'] = self.status + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('ObjectId') is not None: + self.object_id = m.get('ObjectId') + if m.get('ObjectType') is not None: + self.object_type = m.get('ObjectType') + if m.get('OperationId') is not None: + self.operation_id = m.get('OperationId') + if m.get('OperationType') is not None: + self.operation_type = m.get('OperationType') + if m.get('Order') is not None: + self.order = m.get('Order') + if 
m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('Status') is not None: + self.status = m.get('Status') + return self + + +class ListOperationsResponseBody(TeaModel): + def __init__( + self, + operations: List[ResourceOperation] = None, + request_id: str = None, + ): + self.operations = operations + self.request_id = request_id + + def validate(self): + if self.operations: + for k in self.operations: + if k: + k.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + result['Operations'] = [] + if self.operations is not None: + for k in self.operations: + result['Operations'].append(k.to_map() if k else None) + if self.request_id is not None: + result['RequestId'] = self.request_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + self.operations = [] + if m.get('Operations') is not None: + for k in m.get('Operations'): + temp_model = ResourceOperation() + self.operations.append(temp_model.from_map(k)) + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + return self + + +class ListOperationsResponse(TeaModel): + def __init__( + self, + headers: Dict[str, str] = None, + status_code: int = None, + body: ListOperationsResponseBody = None, + ): + self.headers = headers + self.status_code = status_code + self.body = body + + def validate(self): + if self.body: + self.body.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('headers') 
is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = ListOperationsResponseBody() + self.body = temp_model.from_map(m['body']) + return self + + +class ListPermissionsResponseBody(TeaModel): def __init__( self, - nodes: List[Node] = None, + features: Features = None, + permissions: List[Permission] = None, request_id: str = None, - total_count: int = None, ): - self.nodes = nodes + self.features = features + self.permissions = permissions self.request_id = request_id - self.total_count = total_count def validate(self): - if self.nodes: - for k in self.nodes: + if self.features: + self.features.validate() + if self.permissions: + for k in self.permissions: if k: k.validate() @@ -17835,36 +17019,37 @@ def to_map(self): return _map result = dict() - result['Nodes'] = [] - if self.nodes is not None: - for k in self.nodes: - result['Nodes'].append(k.to_map() if k else None) + if self.features is not None: + result['Features'] = self.features.to_map() + result['Permissions'] = [] + if self.permissions is not None: + for k in self.permissions: + result['Permissions'].append(k.to_map() if k else None) if self.request_id is not None: - result['RequestId'] = self.request_id - if self.total_count is not None: - result['TotalCount'] = self.total_count + result['requestId'] = self.request_id return result def from_map(self, m: dict = None): m = m or dict() - self.nodes = [] - if m.get('Nodes') is not None: - for k in m.get('Nodes'): - temp_model = Node() - self.nodes.append(temp_model.from_map(k)) - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - if m.get('TotalCount') is not None: - self.total_count = m.get('TotalCount') + if m.get('Features') is not None: + temp_model = Features() + self.features = temp_model.from_map(m['Features']) + self.permissions = [] + if m.get('Permissions') is not None: + for k in m.get('Permissions'): + 
temp_model = Permission() + self.permissions.append(temp_model.from_map(k)) + if m.get('requestId') is not None: + self.request_id = m.get('requestId') return self -class ListNodesResponse(TeaModel): +class ListPermissionsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListNodesResponseBody = None, + body: ListPermissionsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -17895,33 +17080,31 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListNodesResponseBody() + temp_model = ListPermissionsResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListOperationsRequest(TeaModel): +class ListQuotaUsersRequest(TeaModel): def __init__( self, - object_id: str = None, - object_type: str = None, - operation_id: str = None, - operation_type: str = None, order: str = None, page_number: int = None, page_size: int = None, + self_only: bool = None, sort_by: str = None, - status: str = None, + user_id: str = None, + username: str = None, + workspace_id: str = None, ): - self.object_id = object_id - self.object_type = object_type - self.operation_id = operation_id - self.operation_type = operation_type self.order = order self.page_number = page_number self.page_size = page_size + self.self_only = self_only self.sort_by = sort_by - self.status = status + self.user_id = user_id + self.username = username + self.workspace_id = workspace_id def validate(self): pass @@ -17932,61 +17115,59 @@ def to_map(self): return _map result = dict() - if self.object_id is not None: - result['ObjectId'] = self.object_id - if self.object_type is not None: - result['ObjectType'] = self.object_type - if self.operation_id is not None: - result['OperationId'] = self.operation_id - if self.operation_type is not None: - result['OperationType'] = self.operation_type if self.order is not 
None: result['Order'] = self.order if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: result['PageSize'] = self.page_size + if self.self_only is not None: + result['SelfOnly'] = self.self_only if self.sort_by is not None: result['SortBy'] = self.sort_by - if self.status is not None: - result['Status'] = self.status + if self.user_id is not None: + result['UserId'] = self.user_id + if self.username is not None: + result['Username'] = self.username + if self.workspace_id is not None: + result['WorkspaceId'] = self.workspace_id return result def from_map(self, m: dict = None): m = m or dict() - if m.get('ObjectId') is not None: - self.object_id = m.get('ObjectId') - if m.get('ObjectType') is not None: - self.object_type = m.get('ObjectType') - if m.get('OperationId') is not None: - self.operation_id = m.get('OperationId') - if m.get('OperationType') is not None: - self.operation_type = m.get('OperationType') if m.get('Order') is not None: self.order = m.get('Order') if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: self.page_size = m.get('PageSize') + if m.get('SelfOnly') is not None: + self.self_only = m.get('SelfOnly') if m.get('SortBy') is not None: self.sort_by = m.get('SortBy') - if m.get('Status') is not None: - self.status = m.get('Status') + if m.get('UserId') is not None: + self.user_id = m.get('UserId') + if m.get('Username') is not None: + self.username = m.get('Username') + if m.get('WorkspaceId') is not None: + self.workspace_id = m.get('WorkspaceId') return self -class ListOperationsResponseBody(TeaModel): +class ListQuotaUsersResponseBody(TeaModel): def __init__( self, - operations: List[ResourceOperation] = None, + quota_users: List[QuotaUser] = None, request_id: str = None, + total_count: str = None, ): - self.operations = operations + self.quota_users = quota_users self.request_id = request_id + self.total_count = total_count def 
validate(self): - if self.operations: - for k in self.operations: + if self.quota_users: + for k in self.quota_users: if k: k.validate() @@ -17996,32 +17177,36 @@ def to_map(self): return _map result = dict() - result['Operations'] = [] - if self.operations is not None: - for k in self.operations: - result['Operations'].append(k.to_map() if k else None) + result['QuotaUsers'] = [] + if self.quota_users is not None: + for k in self.quota_users: + result['QuotaUsers'].append(k.to_map() if k else None) if self.request_id is not None: result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count return result def from_map(self, m: dict = None): m = m or dict() - self.operations = [] - if m.get('Operations') is not None: - for k in m.get('Operations'): - temp_model = ResourceOperation() - self.operations.append(temp_model.from_map(k)) + self.quota_users = [] + if m.get('QuotaUsers') is not None: + for k in m.get('QuotaUsers'): + temp_model = QuotaUser() + self.quota_users.append(temp_model.from_map(k)) if m.get('RequestId') is not None: self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') return self -class ListOperationsResponse(TeaModel): +class ListQuotaUsersResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListOperationsResponseBody = None, + body: ListQuotaUsersResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -18052,27 +17237,132 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListOperationsResponseBody() + temp_model = ListQuotaUsersResponseBody() self.body = temp_model.from_map(m['body']) return self -class ListPermissionsResponseBody(TeaModel): +class ListQuotaWorkloadsRequest(TeaModel): + def __init__( + self, + before_workload_id: str = None, + 
node_name: str = None, + order: str = None, + page_number: int = None, + page_size: int = None, + show_own: bool = None, + sort_by: str = None, + status: str = None, + sub_quota_ids: str = None, + user_ids: str = None, + workload_created_time_range: TimeRangeFilter = None, + workload_ids: str = None, + workload_type: str = None, + workspace_ids: str = None, + ): + self.before_workload_id = before_workload_id + self.node_name = node_name + self.order = order + self.page_number = page_number + self.page_size = page_size + self.show_own = show_own + self.sort_by = sort_by + self.status = status + self.sub_quota_ids = sub_quota_ids + self.user_ids = user_ids + self.workload_created_time_range = workload_created_time_range + self.workload_ids = workload_ids + self.workload_type = workload_type + self.workspace_ids = workspace_ids + + def validate(self): + if self.workload_created_time_range: + self.workload_created_time_range.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.before_workload_id is not None: + result['BeforeWorkloadId'] = self.before_workload_id + if self.node_name is not None: + result['NodeName'] = self.node_name + if self.order is not None: + result['Order'] = self.order + if self.page_number is not None: + result['PageNumber'] = self.page_number + if self.page_size is not None: + result['PageSize'] = self.page_size + if self.show_own is not None: + result['ShowOwn'] = self.show_own + if self.sort_by is not None: + result['SortBy'] = self.sort_by + if self.status is not None: + result['Status'] = self.status + if self.sub_quota_ids is not None: + result['SubQuotaIds'] = self.sub_quota_ids + if self.user_ids is not None: + result['UserIds'] = self.user_ids + if self.workload_created_time_range is not None: + result['WorkloadCreatedTimeRange'] = self.workload_created_time_range.to_map() + if self.workload_ids is not None: + result['WorkloadIds'] = self.workload_ids + if 
self.workload_type is not None: + result['WorkloadType'] = self.workload_type + if self.workspace_ids is not None: + result['WorkspaceIds'] = self.workspace_ids + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('BeforeWorkloadId') is not None: + self.before_workload_id = m.get('BeforeWorkloadId') + if m.get('NodeName') is not None: + self.node_name = m.get('NodeName') + if m.get('Order') is not None: + self.order = m.get('Order') + if m.get('PageNumber') is not None: + self.page_number = m.get('PageNumber') + if m.get('PageSize') is not None: + self.page_size = m.get('PageSize') + if m.get('ShowOwn') is not None: + self.show_own = m.get('ShowOwn') + if m.get('SortBy') is not None: + self.sort_by = m.get('SortBy') + if m.get('Status') is not None: + self.status = m.get('Status') + if m.get('SubQuotaIds') is not None: + self.sub_quota_ids = m.get('SubQuotaIds') + if m.get('UserIds') is not None: + self.user_ids = m.get('UserIds') + if m.get('WorkloadCreatedTimeRange') is not None: + temp_model = TimeRangeFilter() + self.workload_created_time_range = temp_model.from_map(m['WorkloadCreatedTimeRange']) + if m.get('WorkloadIds') is not None: + self.workload_ids = m.get('WorkloadIds') + if m.get('WorkloadType') is not None: + self.workload_type = m.get('WorkloadType') + if m.get('WorkspaceIds') is not None: + self.workspace_ids = m.get('WorkspaceIds') + return self + + +class ListQuotaWorkloadsResponseBody(TeaModel): def __init__( self, - features: Features = None, - permissions: List[Permission] = None, request_id: str = None, + total_count: int = None, + workloads: List[QueueInfo] = None, ): - self.features = features - self.permissions = permissions self.request_id = request_id + self.total_count = total_count + self.workloads = workloads def validate(self): - if self.features: - self.features.validate() - if self.permissions: - for k in self.permissions: + if self.workloads: + for k in self.workloads: if k: k.validate() @@ -18081,38 
+17371,37 @@ def to_map(self): if _map is not None: return _map - result = dict() - if self.features is not None: - result['Features'] = self.features.to_map() - result['Permissions'] = [] - if self.permissions is not None: - for k in self.permissions: - result['Permissions'].append(k.to_map() if k else None) + result = dict() if self.request_id is not None: - result['requestId'] = self.request_id + result['RequestId'] = self.request_id + if self.total_count is not None: + result['TotalCount'] = self.total_count + result['Workloads'] = [] + if self.workloads is not None: + for k in self.workloads: + result['Workloads'].append(k.to_map() if k else None) return result def from_map(self, m: dict = None): m = m or dict() - if m.get('Features') is not None: - temp_model = Features() - self.features = temp_model.from_map(m['Features']) - self.permissions = [] - if m.get('Permissions') is not None: - for k in m.get('Permissions'): - temp_model = Permission() - self.permissions.append(temp_model.from_map(k)) - if m.get('requestId') is not None: - self.request_id = m.get('requestId') + if m.get('RequestId') is not None: + self.request_id = m.get('RequestId') + if m.get('TotalCount') is not None: + self.total_count = m.get('TotalCount') + self.workloads = [] + if m.get('Workloads') is not None: + for k in m.get('Workloads'): + temp_model = QueueInfo() + self.workloads.append(temp_model.from_map(k)) return self -class ListPermissionsResponse(TeaModel): +class ListQuotaWorkloadsResponse(TeaModel): def __init__( self, headers: Dict[str, str] = None, status_code: int = None, - body: ListPermissionsResponseBody = None, + body: ListQuotaWorkloadsResponseBody = None, ): self.headers = headers self.status_code = status_code @@ -18143,7 +17432,7 @@ def from_map(self, m: dict = None): if m.get('statusCode') is not None: self.status_code = m.get('statusCode') if m.get('body') is not None: - temp_model = ListPermissionsResponseBody() + temp_model = ListQuotaWorkloadsResponseBody() 
self.body = temp_model.from_map(m['body']) return self @@ -18162,6 +17451,7 @@ def __init__( resource_type: str = None, sort_by: str = None, statuses: str = None, + verbose: bool = None, workspace_ids: str = None, ): self.labels = labels @@ -18175,6 +17465,7 @@ def __init__( self.resource_type = resource_type self.sort_by = sort_by self.statuses = statuses + self.verbose = verbose self.workspace_ids = workspace_ids def validate(self): @@ -18208,6 +17499,8 @@ def to_map(self): result['SortBy'] = self.sort_by if self.statuses is not None: result['Statuses'] = self.statuses + if self.verbose is not None: + result['Verbose'] = self.verbose if self.workspace_ids is not None: result['WorkspaceIds'] = self.workspace_ids return result @@ -18236,6 +17529,8 @@ def from_map(self, m: dict = None): self.sort_by = m.get('SortBy') if m.get('Statuses') is not None: self.statuses = m.get('Statuses') + if m.get('Verbose') is not None: + self.verbose = m.get('Verbose') if m.get('WorkspaceIds') is not None: self.workspace_ids = m.get('WorkspaceIds') return self @@ -18336,6 +17631,7 @@ def __init__( ecs_spec: str = None, name: str = None, order: str = None, + order_instance_id: str = None, page_number: int = None, page_size: int = None, payment_duration: str = None, @@ -18348,6 +17644,7 @@ def __init__( self.ecs_spec = ecs_spec self.name = name self.order = order + self.order_instance_id = order_instance_id self.page_number = page_number self.page_size = page_size self.payment_duration = payment_duration @@ -18373,6 +17670,8 @@ def to_map(self): result['Name'] = self.name if self.order is not None: result['Order'] = self.order + if self.order_instance_id is not None: + result['OrderInstanceId'] = self.order_instance_id if self.page_number is not None: result['PageNumber'] = self.page_number if self.page_size is not None: @@ -18399,6 +17698,8 @@ def from_map(self, m: dict = None): self.name = m.get('Name') if m.get('Order') is not None: self.order = m.get('Order') + if 
m.get('OrderInstanceId') is not None: + self.order_instance_id = m.get('OrderInstanceId') if m.get('PageNumber') is not None: self.page_number = m.get('PageNumber') if m.get('PageSize') is not None: @@ -18588,6 +17889,7 @@ def __init__( ): self.request_id = request_id self.resource_groups = resource_groups + # This parameter is required. self.total_count = total_count def validate(self): @@ -18819,8 +18121,10 @@ def __init__( tag: List[ListTagResourcesRequestTag] = None, ): self.next_token = next_token + # This parameter is required. self.region_id = region_id self.resource_id = resource_id + # This parameter is required. self.resource_type = resource_type self.tag = tag @@ -18878,8 +18182,10 @@ def __init__( tag_shrink: str = None, ): self.next_token = next_token + # This parameter is required. self.region_id = region_id self.resource_id_shrink = resource_id_shrink + # This parameter is required. self.resource_type = resource_type self.tag_shrink = tag_shrink @@ -19061,9 +18367,11 @@ def __init__( start_time: str = None, token: str = None, ): + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.end_time = end_time self.page_number = page_number self.page_size = page_size + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.start_time = start_time self.token = token @@ -19192,9 +18500,11 @@ def __init__( start_time: str = None, token: str = None, ): + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.end_time = end_time self.page_number = page_number self.page_size = page_size + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.start_time = start_time self.token = token @@ -19324,9 +18634,12 @@ def __init__( time_step: str = None, token: str = None, ): + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.end_time = end_time self.instance_id = instance_id + # This parameter is required. 
self.metric_type = metric_type + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.start_time = start_time self.time_step = time_step self.token = token @@ -19544,10 +18857,12 @@ def __init__( token: str = None, worker_id: str = None, ): + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.end_time = end_time self.instance_id = instance_id self.page_number = page_number self.page_size = page_size + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.start_time = start_time self.token = token self.worker_id = worker_id @@ -19687,11 +19002,13 @@ def __init__( start_time: str = None, token: str = None, ): + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.end_time = end_time self.name = name self.order = order self.page_number = page_number self.page_size = page_size + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.start_time = start_time self.token = token @@ -19747,6 +19064,7 @@ def __init__( value: float = None, ): self.name = name + # Use the UTC time format: yyyy-MM-ddTHH:mmZ self.timestamp = timestamp self.value = value @@ -21026,7 +20344,9 @@ def __init__( operation: str = None, resource_group_id: str = None, ): + # This parameter is required. self.operation = operation + # This parameter is required. self.resource_group_id = resource_group_id def validate(self): @@ -21133,6 +20453,7 @@ def __init__( target_algorithm_name: str = None, update_if_exists: bool = None, ): + # This parameter is required. self.target_algorithm_name = target_algorithm_name self.update_if_exists = update_if_exists @@ -21241,6 +20562,7 @@ def __init__( target_algorithm_version: str = None, update_if_exists: bool = None, ): + # This parameter is required. 
self.target_algorithm_name = target_algorithm_name self.target_algorithm_version = target_algorithm_version self.update_if_exists = update_if_exists @@ -21353,6 +20675,74 @@ def from_map(self, m: dict = None): return self +class ReleaseMachineGroupResponseBody(TeaModel): + def __init__( + self, + request_id: str = None, + ): + self.request_id = request_id + + def validate(self): + pass + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.request_id is not None: + result['requestId'] = self.request_id + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('requestId') is not None: + self.request_id = m.get('requestId') + return self + + +class ReleaseMachineGroupResponse(TeaModel): + def __init__( + self, + headers: Dict[str, str] = None, + status_code: int = None, + body: ReleaseMachineGroupResponseBody = None, + ): + self.headers = headers + self.status_code = status_code + self.body = body + + def validate(self): + if self.body: + self.body.validate() + + def to_map(self): + _map = super().to_map() + if _map is not None: + return _map + + result = dict() + if self.headers is not None: + result['headers'] = self.headers + if self.status_code is not None: + result['statusCode'] = self.status_code + if self.body is not None: + result['body'] = self.body.to_map() + return result + + def from_map(self, m: dict = None): + m = m or dict() + if m.get('headers') is not None: + self.headers = m.get('headers') + if m.get('statusCode') is not None: + self.status_code = m.get('statusCode') + if m.get('body') is not None: + temp_model = ReleaseMachineGroupResponseBody() + self.body = temp_model.from_map(m['body']) + return self + + class ScaleQuotaRequest(TeaModel): def __init__( self, @@ -21695,8 +21085,11 @@ def __init__( tag_key: List[str] = None, ): self.all = all + # This parameter is required. self.region_id = region_id + # This parameter is required. 
self.resource_id = resource_id + # This parameter is required. self.resource_type = resource_type self.tag_key = tag_key @@ -21746,8 +21139,11 @@ def __init__( tag_key_shrink: str = None, ): self.all = all + # This parameter is required. self.region_id = region_id + # This parameter is required. self.resource_id_shrink = resource_id_shrink + # This parameter is required. self.resource_type = resource_type self.tag_key_shrink = tag_key_shrink @@ -22390,224 +21786,28 @@ def from_map(self, m: dict = None): return self -class UpdateLLMProjectRequestLabels(TeaModel): - def __init__( - self, - key: str = None, - value: str = None, - ): - self.key = key - self.value = value - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.key is not None: - result['Key'] = self.key - if self.value is not None: - result['Value'] = self.value - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('Key') is not None: - self.key = m.get('Key') - if m.get('Value') is not None: - self.value = m.get('Value') - return self - - -class UpdateLLMProjectRequestRuntime(TeaModel): - def __init__( - self, - runtime_id: str = None, - runtime_type: str = None, - ): - self.runtime_id = runtime_id - self.runtime_type = runtime_type - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.runtime_id is not None: - result['RuntimeId'] = self.runtime_id - if self.runtime_type is not None: - result['RuntimeType'] = self.runtime_type - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('RuntimeId') is not None: - self.runtime_id = m.get('RuntimeId') - if m.get('RuntimeType') is not None: - self.runtime_type = m.get('RuntimeType') - return self - - -class UpdateLLMProjectRequest(TeaModel): - def __init__( - self, - labels: List[UpdateLLMProjectRequestLabels] = None, - 
project_description: str = None, - project_name: str = None, - root_path: str = None, - runtime: UpdateLLMProjectRequestRuntime = None, - ): - self.labels = labels - self.project_description = project_description - self.project_name = project_name - self.root_path = root_path - self.runtime = runtime - - def validate(self): - if self.labels: - for k in self.labels: - if k: - k.validate() - if self.runtime: - self.runtime.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - result['Labels'] = [] - if self.labels is not None: - for k in self.labels: - result['Labels'].append(k.to_map() if k else None) - if self.project_description is not None: - result['ProjectDescription'] = self.project_description - if self.project_name is not None: - result['ProjectName'] = self.project_name - if self.root_path is not None: - result['RootPath'] = self.root_path - if self.runtime is not None: - result['Runtime'] = self.runtime.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - self.labels = [] - if m.get('Labels') is not None: - for k in m.get('Labels'): - temp_model = UpdateLLMProjectRequestLabels() - self.labels.append(temp_model.from_map(k)) - if m.get('ProjectDescription') is not None: - self.project_description = m.get('ProjectDescription') - if m.get('ProjectName') is not None: - self.project_name = m.get('ProjectName') - if m.get('RootPath') is not None: - self.root_path = m.get('RootPath') - if m.get('Runtime') is not None: - temp_model = UpdateLLMProjectRequestRuntime() - self.runtime = temp_model.from_map(m['Runtime']) - return self - - -class UpdateLLMProjectResponseBody(TeaModel): - def __init__( - self, - project_id: str = None, - request_id: str = None, - ): - self.project_id = project_id - self.request_id = request_id - - def validate(self): - pass - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if 
self.project_id is not None: - result['ProjectId'] = self.project_id - if self.request_id is not None: - result['RequestId'] = self.request_id - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('ProjectId') is not None: - self.project_id = m.get('ProjectId') - if m.get('RequestId') is not None: - self.request_id = m.get('RequestId') - return self - - -class UpdateLLMProjectResponse(TeaModel): - def __init__( - self, - headers: Dict[str, str] = None, - status_code: int = None, - body: UpdateLLMProjectResponseBody = None, - ): - self.headers = headers - self.status_code = status_code - self.body = body - - def validate(self): - if self.body: - self.body.validate() - - def to_map(self): - _map = super().to_map() - if _map is not None: - return _map - - result = dict() - if self.headers is not None: - result['headers'] = self.headers - if self.status_code is not None: - result['statusCode'] = self.status_code - if self.body is not None: - result['body'] = self.body.to_map() - return result - - def from_map(self, m: dict = None): - m = m or dict() - if m.get('headers') is not None: - self.headers = m.get('headers') - if m.get('statusCode') is not None: - self.status_code = m.get('statusCode') - if m.get('body') is not None: - temp_model = UpdateLLMProjectResponseBody() - self.body = temp_model.from_map(m['body']) - return self - - class UpdateQuotaRequest(TeaModel): def __init__( self, description: str = None, labels: List[Label] = None, queue_strategy: str = None, + quota_config: QuotaConfig = None, + quota_name: str = None, ): self.description = description self.labels = labels self.queue_strategy = queue_strategy + self.quota_config = quota_config + self.quota_name = quota_name def validate(self): if self.labels: for k in self.labels: if k: k.validate() + if self.quota_config: + self.quota_config.validate() def to_map(self): _map = super().to_map() @@ -22623,6 +21823,10 @@ def to_map(self): result['Labels'].append(k.to_map() if k 
else None) if self.queue_strategy is not None: result['QueueStrategy'] = self.queue_strategy + if self.quota_config is not None: + result['QuotaConfig'] = self.quota_config.to_map() + if self.quota_name is not None: + result['QuotaName'] = self.quota_name return result def from_map(self, m: dict = None): @@ -22636,6 +21840,11 @@ def from_map(self, m: dict = None): self.labels.append(temp_model.from_map(k)) if m.get('QueueStrategy') is not None: self.queue_strategy = m.get('QueueStrategy') + if m.get('QuotaConfig') is not None: + temp_model = QuotaConfig() + self.quota_config = temp_model.from_map(m['QuotaConfig']) + if m.get('QuotaName') is not None: + self.quota_name = m.get('QuotaName') return self diff --git a/pai/model/_model_recipe.py b/pai/model/_model_recipe.py index 39f35fc..9d982f8 100644 --- a/pai/model/_model_recipe.py +++ b/pai/model/_model_recipe.py @@ -29,6 +29,8 @@ InstanceSpec, ModelRecipeSpec, OssLocation, + ResourceType, + SpotSpec, TrainingJob, UriInput, UserVpcConfig, @@ -51,6 +53,7 @@ class RecipeInitKwargs(object): model_channel_name: Optional[str] model_uri: Optional[str] hyperparameters: Optional[Dict[str, Any]] + # hyperparameter_definitions: Optional[List[HyperParameterDefinition]] job_type: Optional[str] image_uri: Optional[str] source_dir: Optional[str] @@ -66,6 +69,8 @@ class RecipeInitKwargs(object): input_channels: Optional[List[Channel]] output_channels: Optional[List[Channel]] default_inputs: Optional[Union[UriInput, DatasetConfig]] + customization: Optional[Dict[str, Any]] + supported_instance_types: Optional[List[str]] class ModelRecipeType(enum.Enum): @@ -99,6 +104,8 @@ def __init__( instance_type: Optional[str] = None, instance_spec: Optional[InstanceSpec] = None, resource_id: Optional[str] = None, + resource_type: Optional[Union[str, ResourceType]] = None, + spot_spec: Optional[SpotSpec] = None, user_vpc_config: Optional[UserVpcConfig] = None, labels: Optional[Dict[str, str]] = None, requirements: Optional[List[str]] = None, @@ 
-109,6 +116,8 @@ def __init__( max_run_time: Optional[int] = None, default_inputs: Optional[Dict[str, Any]] = None, base_job_name: Optional[str] = None, + supported_instance_type: Optional[List[str]] = None, + settings: Optional[Dict[str, Any]] = None, ): init_kwargs = self._init_kwargs( model_name=model_name, @@ -134,6 +143,7 @@ def __init__( output_channels=output_channels, default_inputs=default_inputs, max_run_time=max_run_time, + supported_instance_types=supported_instance_type, ) self.model_name = init_kwargs.model_name self.model_version = init_kwargs.model_version @@ -147,12 +157,18 @@ def __init__( self.command = init_kwargs.command self.source_dir = init_kwargs.source_dir self.default_inputs = init_kwargs.default_inputs + self.customization = init_kwargs.customization + self.supported_instance_types = init_kwargs.supported_instance_types + self.input_channels = init_kwargs.input_channels + self.output_channels = init_kwargs.output_channels super().__init__( + resource_type=resource_type, base_job_name=base_job_name, experiment_config=experiment_config, resource_id=resource_id, user_vpc_config=user_vpc_config, + spot_spec=spot_spec, instance_type=init_kwargs.instance_type, instance_count=init_kwargs.instance_count, instance_spec=init_kwargs.instance_spec, @@ -160,6 +176,7 @@ def __init__( environments=init_kwargs.environments, requirements=init_kwargs.requirements, labels=init_kwargs.labels, + settings=settings, ) @classmethod @@ -188,6 +205,7 @@ def _init_kwargs( input_channels: List[Channel] = None, output_channels: List[Channel] = None, default_inputs: Optional[Union[UriInput, DatasetConfig]] = None, + supported_instance_types: Optional[List[str]] = None, ) -> RecipeInitKwargs: model = ( RegisteredModel( @@ -204,6 +222,7 @@ def _init_kwargs( else None ) model_uri = model_uri or (model and model.uri) + customization = None if not model_recipe_spec: return RecipeInitKwargs( model_name=model_name, @@ -228,6 +247,8 @@ def _init_kwargs( 
output_channels=output_channels, max_run_time=max_run_time, default_inputs=default_inputs, + customization=customization, + supported_instance_types=supported_instance_types, ) if not model_uri: input_ = next( @@ -257,22 +278,31 @@ def _init_kwargs( else: default_inputs[item.name] = item algorithm_spec = cls._get_algorithm_spec(model_recipe_spec) + supported_instance_types = ( + supported_instance_types or model_recipe_spec.supported_instance_types + ) if algorithm_spec: if ( not source_dir and algorithm_spec.code_dir - and isinstance(algorithm_spec.code_dir, OssLocation) + and isinstance(algorithm_spec.code_dir.location_value, OssLocation) ): - source_dir = f"oss://{0}.{1}/{2}".format( - algorithm_spec.code_dir.bucket, - algorithm_spec.code_dir.endpoint, - algorithm_spec.code_dir.key.lstrip("/"), - ) + oss_location = algorithm_spec.code_dir.location_value + if oss_location.endpoint: + source_dir = f"oss://{oss_location.bucket}.{oss_location.endpoint}/{oss_location.key.lstrip('/')}" + else: + source_dir = ( + f"oss://{oss_location.bucket}/{oss_location.key.lstrip('/')}" + ) image_uri = image_uri or algorithm_spec.image command = command or algorithm_spec.command job_type = job_type or algorithm_spec.job_type input_channels = input_channels or algorithm_spec.input_channels output_channels = output_channels or algorithm_spec.output_channels + customization = algorithm_spec.customization + supported_instance_types = ( + supported_instance_types or algorithm_spec.supported_channel_types + ) instance_type, instance_spec, instance_count = cls._get_compute_resource_config( instance_type=instance_type, @@ -280,6 +310,7 @@ def _init_kwargs( instance_count=instance_count, resource_id=resource_id, compute_resource=model_recipe_spec.compute_resource, + supported_instance_types=supported_instance_types, ) hyperparameters = hyperparameters or {} hyperparameters = { @@ -295,6 +326,7 @@ def _init_kwargs( } requirements = requirements or model_recipe_spec.requirements environments 
= environments or model_recipe_spec.environments + return RecipeInitKwargs( model_name=model_name, model_version=model_version, @@ -318,6 +350,8 @@ def _init_kwargs( output_channels=output_channels, resource_id=resource_id, default_inputs=default_inputs, + customization=customization, + supported_instance_types=supported_instance_types, ) @staticmethod @@ -327,6 +361,7 @@ def _get_compute_resource_config( instance_spec: InstanceSpec, resource_id: str, compute_resource: ComputeResource, + supported_instance_types: List[str], ) -> Tuple[str, InstanceSpec, int]: if resource_id: if instance_type: @@ -355,7 +390,12 @@ def _get_compute_resource_config( compute_resource and compute_resource.ecs_spec ) if not instance_type: - raise ValueError("No instance type is specified for the training job") + if not supported_instance_types: + raise ValueError( + "No instance type is specified for the training job" + ) + else: + instance_type = supported_instance_types[0] instance_count = ( instance_count or (compute_resource and compute_resource.ecs_count) or 1 ) @@ -395,11 +435,14 @@ def _build_algorithm_spec( image=self.image_uri, job_type=self.job_type, code_dir=code_input, - output_channels=self._default_training_output_channels(), - input_channels=[ + output_channels=self.output_channels + or self._default_training_output_channels(), + input_channels=self.input_channels + or [ Channel(name=channel_name, required=False) for channel_name in inputs.keys() ], + customization=self.customization, ) return algorithm_spec @@ -514,8 +557,10 @@ def __init__( command: Union[str, List[str]] = None, instance_count: Optional[int] = None, instance_type: Optional[str] = None, + spot_spec: Optional[SpotSpec] = None, instance_spec: Optional[InstanceSpec] = None, resource_id: Optional[str] = None, + resource_type: Optional[Union[str, ResourceType]] = None, user_vpc_config: Optional[UserVpcConfig] = None, labels: Optional[Dict[str, str]] = None, requirements: Optional[List[str]] = None, @@ -526,6 
+571,7 @@ def __init__( max_run_time: Optional[int] = None, default_training_inputs: Optional[Dict[str, Any]] = None, base_job_name: Optional[str] = None, + **kwargs, ): """Initialize a ModelTrainingRecipe object. @@ -567,6 +613,11 @@ def __init__( be provided when the instance spec is set. Default to None. resource_id (str, optional): The ID of the resource group used to run the training job. Default to None. + spot_spec (:class:`pai.model.SpotSpec`, optional): The spot instance config + used to run the training job. If provided, spot instance will be used. + resource_type (str, optional): The resource type used to run the training job. + By default, general computing resource is used. If the resource_type is + 'Lingjun', Lingjun computing resource is used. user_vpc_config (:class:`pai.model.UserVpcConfig`, optional): The VPC configuration used to enable the job instance to connect to the specified user VPC. Default to None. @@ -594,7 +645,9 @@ def __init__( instance_count=instance_count, instance_type=instance_type, instance_spec=instance_spec, + resource_type=resource_type, resource_id=resource_id, + spot_spec=spot_spec, user_vpc_config=user_vpc_config, labels=labels, requirements=requirements, @@ -605,6 +658,7 @@ def __init__( max_run_time=max_run_time, default_inputs=default_training_inputs, base_job_name=base_job_name, + **kwargs, ) def train( diff --git a/pai/processor.py b/pai/processor.py index e73b085..6626c20 100644 --- a/pai/processor.py +++ b/pai/processor.py @@ -23,10 +23,13 @@ Channel, CodeDir, ExperimentConfig, + SpotSpec, TrainingJob, + UriOutput, UserVpcConfig, _TrainingJobSubmitter, ) +from .job._training_job import ResourceType from .session import Session, get_default_session logger = get_logger(__name__) @@ -47,9 +50,12 @@ def __init__( base_job_name: Optional[str] = None, output_path: Optional[str] = None, instance_type: Optional[str] = None, + spot_spec: Optional[SpotSpec] = None, + resource_type: Optional[Union[str, ResourceType]] = None, 
instance_count: Optional[int] = None, user_vpc_config: Optional[UserVpcConfig] = None, experiment_config: Optional[ExperimentConfig] = None, + settings: Optional[Dict[str, Any]] = None, labels: Optional[Dict[str, str]] = None, session: Optional[Session] = None, ): @@ -142,6 +148,9 @@ def __init__( If the instance_type is "local", the job is executed locally using docker. instance_count (int): The number of machines used to run the job. + resource_type (str, optional): The resource type used to run the training job. + By default, general computing resource is used. If the resource_type is + 'Lingjun', Lingjun computing resource is used. user_vpc_config (:class:`pai.estimator.UserVpcConfig`, optional): The VPC configuration used to enable the job instance to connect to the specified user VPC. If provided, an Elastic Network Interface (ENI) will @@ -152,6 +161,8 @@ def __init__( experiment configuration used to construct the relationship between the job and the experiment. If provided, the training job will belong to the specified experiment, in which case the job will use artifact_uri of + settings (dict, optional): A dictionary that represents the additional settings + for job, such as AIMaster configurations. experiment as default output path. Default to None. labels (Dict[str, str], optional): A dictionary that maps label names to their values. 
This optional field allows you to provide a set of labels @@ -170,6 +181,8 @@ def __init__( self._input_channels = None self._output_channels = None super().__init__( + resource_type=resource_type, + spot_spec=spot_spec, base_job_name=base_job_name, output_path=output_path, experiment_config=experiment_config, @@ -180,6 +193,7 @@ def __init__( environments=environments, requirements=requirements, labels=labels, + settings=settings, ) def run( @@ -307,8 +321,8 @@ def get_outputs_data(self) -> Dict[str, str]: raise RuntimeError("Current no Job for the processor.") return { - ch["Name"]: ch["OutputUri"] or ch["DatasetId"] - for ch in self.latest_job.output_channels + ch.name: ch.output_uri if isinstance(ch, UriOutput) else ch.dataset_id + for ch in self.latest_job.outputs } def set_input_channels(self, channels: List[Channel]): diff --git a/pai/version.py b/pai/version.py index 32f748d..65ac819 100644 --- a/pai/version.py +++ b/pai/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-VERSION = "0.4.8" +VERSION = "0.4.9.dev0" diff --git a/tests/integration/test_estimator.py b/tests/integration/test_estimator.py index 52e266b..d91bbf4 100644 --- a/tests/integration/test_estimator.py +++ b/tests/integration/test_estimator.py @@ -15,7 +15,7 @@ import os import posixpath import re -from unittest import skipUnless +from unittest import skipIf, skipUnless import pytest @@ -24,7 +24,7 @@ from pai.estimator import AlgorithmEstimator, Estimator from pai.experiment import Experiment from pai.image import retrieve -from pai.job._training_job import ExperimentConfig +from pai.job._training_job import ExperimentConfig, ResourceType, SpotSpec from pai.session import get_default_session from tests.integration import BaseIntegTestCase from tests.integration.utils import t_context @@ -72,11 +72,24 @@ def test_xgb_train(self): "test": self.breast_cancer_test_data_uri, }, ) - model_path = os.path.join(os.path.join(est.model_data(), "model.json")) - self.assertTrue(self.is_oss_object_exists(model_path)) + @skipIf(t_context.support_spot_instance, "Skip spot instance test") + def test_use_spot_instance(self): + xgb_image_uri = retrieve("xgboost", framework_version="latest").image_uri + est = Estimator( + command="echo helloworld", + instance_type="ml.gu7ef.8xlarge-gu100", + image_uri=xgb_image_uri, + spot_spec=SpotSpec( + spot_strategy="SpotWithPriceLimit", + spot_discount_limit=0.5, + ), + resource_type=ResourceType.Lingjun, + ) + est.fit() + def test_torch_run(self): torch_image_uri = retrieve("pytorch", framework_version="1.12").image_uri est = Estimator( diff --git a/tests/integration/test_model/test_model_recipe.py b/tests/integration/test_model/test_model_recipe.py index 55d4e6c..954c779 100644 --- a/tests/integration/test_model/test_model_recipe.py +++ b/tests/integration/test_model/test_model_recipe.py @@ -12,12 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os +from unittest import skipIf import pytest from pai.common.utils import camel_to_snake, random_str -from pai.model import RegisteredModel +from pai.job import SpotSpec +from pai.job._training_job import ResourceType +from pai.model import ModelTrainingRecipe, RegisteredModel from tests.integration import BaseIntegTestCase +from tests.integration.utils import t_context from tests.test_data import test_data_dir @@ -61,6 +65,26 @@ def test_training_e2e(self): ) self.assertIsNotNone(resp.choices[0].message.content) + @skipIf(not t_context.support_spot_instance, "Skip spot instance test") + def test_spot_instance(self): + training_recipe = ModelTrainingRecipe( + model_name="qwen2-7b-instruct", + model_provider="pai", + method="Standard", + resource_type=ResourceType.Lingjun, + spot_spec=SpotSpec( + spot_strategy="SpotWithPriceLimit", + spot_discount_limit=0.5, + ), + instance_type="ml.gu7ef.8xlarge-gu100", + ) + train_data = os.path.join(test_data_dir, "chinese_medical/train_sampled.json") + training_recipe.train( + inputs={ + "train": train_data, + }, + ) + def test_custom_inputs_train(self): model = RegisteredModel(model_name="qwen1.5-0.5b-chat", model_provider="pai") training_recipe = model.training_recipe(method="QLoRA_LLM") diff --git a/tests/integration/utils.py b/tests/integration/utils.py index bf6a3c0..7d4cd41 100644 --- a/tests/integration/utils.py +++ b/tests/integration/utils.py @@ -103,6 +103,10 @@ def has_gpu(self): def is_inner(self): return self.pai_service_config.region_id == "center" + @property + def support_spot_instance(self): + return self.pai_service_config.region_id == "cn-wulanchabu" + @classmethod def _load_test_config(cls): test_config = os.environ.get("PAI_TEST_CONFIG", "test.ini")