1# coding=utf-8
2# --------------------------------------------------------------------------
3# Copyright (c) Microsoft Corporation. All rights reserved.
4# Licensed under the MIT License. See License.txt in the project root for license information.
5# Code generated by Microsoft (R) AutoRest Code Generator.
6# Changes may cause incorrect behavior and will be lost if the code is regenerated.
7# --------------------------------------------------------------------------
8
9import datetime
10from typing import Any, Dict, List, Optional, Union
11
12import msrest.serialization
13
14from ._batch_ai_enums import *
15
16
class AppInsightsReference(msrest.serialization.Model):
    """Azure Application Insights configuration used for performance counters reporting.

    All required parameters must be populated in order to send to Azure.

    :param component: Required. Resource ID of the Azure Application Insights component.
    :type component: ~batch_ai.models.ResourceId
    :param instrumentation_key: The Azure Application Insights instrumentation key value.
    :type instrumentation_key: str
    :param instrumentation_key_secret_reference: KeyVault store and secret holding the Azure
     Application Insights instrumentation key. One of instrumentationKey or
     instrumentationKeySecretReference must be specified.
    :type instrumentation_key_secret_reference: ~batch_ai.models.KeyVaultSecretReference
    """

    _validation = {
        'component': {'required': True},
    }

    _attribute_map = {
        'component': {'key': 'component', 'type': 'ResourceId'},
        'instrumentation_key': {'key': 'instrumentationKey', 'type': 'str'},
        'instrumentation_key_secret_reference': {'key': 'instrumentationKeySecretReference', 'type': 'KeyVaultSecretReference'},
    }

    def __init__(
        self,
        *,
        component: "ResourceId",
        instrumentation_key: Optional[str] = None,
        instrumentation_key_secret_reference: Optional["KeyVaultSecretReference"] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.component = component
        self.instrumentation_key = instrumentation_key
        self.instrumentation_key_secret_reference = instrumentation_key_secret_reference
54
55
class AutoScaleSettings(msrest.serialization.Model):
    """Auto-scale settings for the cluster. The system automatically scales the cluster up and down (within minimumNodeCount and maximumNodeCount) based on the number of queued and running jobs assigned to the cluster.

    All required parameters must be populated in order to send to Azure.

    :param minimum_node_count: Required. The minimum number of compute nodes the Batch AI service
     will try to allocate for the cluster. Note that the actual node count can be lower than this
     value if the subscription does not have enough quota to fulfill the request.
    :type minimum_node_count: int
    :param maximum_node_count: Required. The maximum number of compute nodes the cluster can have.
    :type maximum_node_count: int
    :param initial_node_count: The number of compute nodes to allocate on cluster creation. This
     value is only used during cluster creation. Default: 0.
    :type initial_node_count: int
    """

    _validation = {
        'minimum_node_count': {'required': True},
        'maximum_node_count': {'required': True},
    }

    _attribute_map = {
        'minimum_node_count': {'key': 'minimumNodeCount', 'type': 'int'},
        'maximum_node_count': {'key': 'maximumNodeCount', 'type': 'int'},
        'initial_node_count': {'key': 'initialNodeCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        minimum_node_count: int,
        maximum_node_count: int,
        initial_node_count: Optional[int] = 0,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.minimum_node_count = minimum_node_count
        self.maximum_node_count = maximum_node_count
        self.initial_node_count = initial_node_count
95
96
class AzureBlobFileSystemReference(msrest.serialization.Model):
    """Mounting configuration for an Azure Blob Storage container.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Name of the Azure storage account.
    :type account_name: str
    :param container_name: Required. Name of the Azure Blob Storage container to mount on the
     cluster.
    :type container_name: str
    :param credentials: Required. Information about the Azure storage credentials.
    :type credentials: ~batch_ai.models.AzureStorageCredentialsInfo
    :param relative_mount_path: Required. Relative path on the compute node where the Azure File
     container will be mounted. All cluster level containers are mounted under
     $AZ_BATCHAI_MOUNT_ROOT and all job level containers are mounted under
     $AZ_BATCHAI_JOB_MOUNT_ROOT.
    :type relative_mount_path: str
    :param mount_options: Mount options for mounting blobfuse file system.
    :type mount_options: str
    """

    _validation = {
        'account_name': {'required': True},
        'container_name': {'required': True},
        'credentials': {'required': True},
        'relative_mount_path': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'AzureStorageCredentialsInfo'},
        'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
        'mount_options': {'key': 'mountOptions', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        account_name: str,
        container_name: str,
        credentials: "AzureStorageCredentialsInfo",
        relative_mount_path: str,
        mount_options: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.account_name = account_name
        self.container_name = container_name
        self.credentials = credentials
        self.relative_mount_path = relative_mount_path
        self.mount_options = mount_options
149
150
class AzureFileShareReference(msrest.serialization.Model):
    """Mounting configuration for an Azure File Share.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Name of the Azure storage account.
    :type account_name: str
    :param azure_file_url: Required. URL to access the Azure File.
    :type azure_file_url: str
    :param credentials: Required. Information about the Azure storage credentials.
    :type credentials: ~batch_ai.models.AzureStorageCredentialsInfo
    :param relative_mount_path: Required. Relative path on the compute node where the Azure File
     share will be mounted. All cluster level file shares are mounted under
     $AZ_BATCHAI_MOUNT_ROOT and all job level file shares are mounted under
     $AZ_BATCHAI_JOB_MOUNT_ROOT.
    :type relative_mount_path: str
    :param file_mode: File mode for files on the mounted file share. Default value: 0777.
    :type file_mode: str
    :param directory_mode: File mode for directories on the mounted file share. Default value:
     0777.
    :type directory_mode: str
    """

    _validation = {
        'account_name': {'required': True},
        'azure_file_url': {'required': True},
        'credentials': {'required': True},
        'relative_mount_path': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'azure_file_url': {'key': 'azureFileUrl', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'AzureStorageCredentialsInfo'},
        'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
        'file_mode': {'key': 'fileMode', 'type': 'str'},
        'directory_mode': {'key': 'directoryMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        account_name: str,
        azure_file_url: str,
        credentials: "AzureStorageCredentialsInfo",
        relative_mount_path: str,
        file_mode: Optional[str] = "0777",
        directory_mode: Optional[str] = "0777",
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.account_name = account_name
        self.azure_file_url = azure_file_url
        self.credentials = credentials
        self.relative_mount_path = relative_mount_path
        self.file_mode = file_mode
        self.directory_mode = directory_mode
208
209
class AzureStorageCredentialsInfo(msrest.serialization.Model):
    """Credentials for an Azure storage account.

    :param account_key: Storage account key. One of accountKey or accountKeySecretReference must
     be specified.
    :type account_key: str
    :param account_key_secret_reference: Information about the KeyVault secret storing the storage
     account key. One of accountKey or accountKeySecretReference must be specified.
    :type account_key_secret_reference: ~batch_ai.models.KeyVaultSecretReference
    """

    _attribute_map = {
        'account_key': {'key': 'accountKey', 'type': 'str'},
        'account_key_secret_reference': {'key': 'accountKeySecretReference', 'type': 'KeyVaultSecretReference'},
    }

    def __init__(
        self,
        *,
        account_key: Optional[str] = None,
        account_key_secret_reference: Optional["KeyVaultSecretReference"] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.account_key = account_key
        self.account_key_secret_reference = account_key_secret_reference
236
237
class BatchAIError(msrest.serialization.Model):
    """An error response from the Batch AI service.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: An identifier of the error. Codes are invariant and are intended to be consumed
     programmatically.
    :vartype code: str
    :ivar message: A message describing the error, intended to be suitable for display in a user
     interface.
    :vartype message: str
    :ivar details: A list of additional details about the error.
    :vartype details: list[~batch_ai.models.NameValuePair]
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'details': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'details': {'key': 'details', 'type': '[NameValuePair]'},
    }

    def __init__(
        self,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        # All attributes are read-only and filled in by deserialization.
        self.code = None
        self.message = None
        self.details = None
273
274
class Caffe2Settings(msrest.serialization.Model):
    """Settings for a Caffe2 job.

    All required parameters must be populated in order to send to Azure.

    :param python_script_file_path: Required. The python script to execute.
    :type python_script_file_path: str
    :param python_interpreter_path: The path to the Python interpreter.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments that need to be passed to the python script.
    :type command_line_args: str
    """

    _validation = {
        'python_script_file_path': {'required': True},
    }

    _attribute_map = {
        'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
        'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
        'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        python_script_file_path: str,
        python_interpreter_path: Optional[str] = None,
        command_line_args: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
310
311
class CaffeSettings(msrest.serialization.Model):
    """Settings for a Caffe job.

    :param config_file_path: Path of the config file for the job. Cannot be specified together
     with pythonScriptFilePath.
    :type config_file_path: str
    :param python_script_file_path: Python script to execute. Cannot be specified together with
     configFilePath.
    :type python_script_file_path: str
    :param python_interpreter_path: The path to the Python interpreter. Can only be specified when
     pythonScriptFilePath is specified.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments that need to be passed to the Caffe job.
    :type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. Defaults to the
     value of the nodeCount property.
    :type process_count: int
    """

    _attribute_map = {
        'config_file_path': {'key': 'configFilePath', 'type': 'str'},
        'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
        'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
        'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
        'process_count': {'key': 'processCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        config_file_path: Optional[str] = None,
        python_script_file_path: Optional[str] = None,
        python_interpreter_path: Optional[str] = None,
        command_line_args: Optional[str] = None,
        process_count: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.config_file_path = config_file_path
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
        self.process_count = process_count
355
356
class ChainerSettings(msrest.serialization.Model):
    """Settings for a Chainer job.

    All required parameters must be populated in order to send to Azure.

    :param python_script_file_path: Required. The python script to execute.
    :type python_script_file_path: str
    :param python_interpreter_path: The path to the Python interpreter.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments that need to be passed to the python script.
    :type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. Defaults to the
     value of the nodeCount property.
    :type process_count: int
    """

    _validation = {
        'python_script_file_path': {'required': True},
    }

    _attribute_map = {
        'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
        'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
        'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
        'process_count': {'key': 'processCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        python_script_file_path: str,
        python_interpreter_path: Optional[str] = None,
        command_line_args: Optional[str] = None,
        process_count: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
        self.process_count = process_count
398
399
class CloudErrorBody(msrest.serialization.Model):
    """An error response from the Batch AI service.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed
     programmatically.
    :vartype code: str
    :ivar message: A message describing the error, intended to be suitable for display in a user
     interface.
    :vartype message: str
    :ivar target: The target of the particular error. For example, the name of the property in
     error.
    :vartype target: str
    :ivar details: A list of additional details about the error.
    :vartype details: list[~batch_ai.models.CloudErrorBody]
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
        'details': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[CloudErrorBody]'},
    }

    def __init__(
        self,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        # All attributes are read-only and filled in by deserialization.
        self.code = None
        self.message = None
        self.target = None
        self.details = None
441
442
class ProxyResource(msrest.serialization.Model):
    """A definition of an Azure proxy resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        # All attributes are read-only and filled in by deserialization.
        self.id = None
        self.name = None
        self.type = None
476
477
class Cluster(ProxyResource):
    """Information about a Cluster.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :param vm_size: The size of the virtual machines in the cluster. All nodes in a cluster have
     the same VM size.
    :type vm_size: str
    :param vm_priority: VM priority of cluster nodes. Possible values include: "dedicated",
     "lowpriority".
    :type vm_priority: str or ~batch_ai.models.VmPriority
    :param scale_settings: Scale settings of the cluster.
    :type scale_settings: ~batch_ai.models.ScaleSettings
    :param virtual_machine_configuration: Virtual machine configuration (OS image) of the compute
     nodes. All nodes in a cluster have the same OS image configuration.
    :type virtual_machine_configuration: ~batch_ai.models.VirtualMachineConfiguration
    :param node_setup: Setup (mount file systems, performance counters settings and custom setup
     task) to be performed on each compute node in the cluster.
    :type node_setup: ~batch_ai.models.NodeSetup
    :param user_account_settings: Administrator user account settings which can be used to SSH to
     compute nodes.
    :type user_account_settings: ~batch_ai.models.UserAccountSettings
    :param subnet: Virtual network subnet resource ID the cluster nodes belong to.
    :type subnet: ~batch_ai.models.ResourceId
    :ivar creation_time: The time when the cluster was created.
    :vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: Provisioning state of the cluster. Possible values are: creating -
     the cluster is being created; succeeded - the cluster has been created successfully; failed -
     the cluster creation has failed; deleting - the cluster is being deleted. Possible values
     include: "creating", "succeeded", "failed", "deleting".
    :vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
    :ivar provisioning_state_transition_time: Time when the provisioning state was changed.
    :vartype provisioning_state_transition_time: ~datetime.datetime
    :ivar allocation_state: Allocation state of the cluster. Possible values are: steady - the
     cluster is not resizing; no changes to the number of compute nodes are in progress (a cluster
     enters this state when it is created and when no operations are being performed to change the
     node count); resizing - compute nodes are being added to or removed from the cluster.
     Possible values include: "steady", "resizing".
    :vartype allocation_state: str or ~batch_ai.models.AllocationState
    :ivar allocation_state_transition_time: The time at which the cluster entered its current
     allocation state.
    :vartype allocation_state_transition_time: ~datetime.datetime
    :ivar errors: Collection of errors encountered by various compute nodes during node setup.
    :vartype errors: list[~batch_ai.models.BatchAIError]
    :ivar current_node_count: The number of compute nodes currently assigned to the cluster.
    :vartype current_node_count: int
    :ivar node_state_counts: Counts of various node states on the cluster.
    :vartype node_state_counts: ~batch_ai.models.NodeStateCounts
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'creation_time': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'provisioning_state_transition_time': {'readonly': True},
        'allocation_state': {'readonly': True},
        'allocation_state_transition_time': {'readonly': True},
        'errors': {'readonly': True},
        'current_node_count': {'readonly': True},
        'node_state_counts': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
        'vm_priority': {'key': 'properties.vmPriority', 'type': 'str'},
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
        'virtual_machine_configuration': {'key': 'properties.virtualMachineConfiguration', 'type': 'VirtualMachineConfiguration'},
        'node_setup': {'key': 'properties.nodeSetup', 'type': 'NodeSetup'},
        'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'},
        'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
        'allocation_state': {'key': 'properties.allocationState', 'type': 'str'},
        'allocation_state_transition_time': {'key': 'properties.allocationStateTransitionTime', 'type': 'iso-8601'},
        'errors': {'key': 'properties.errors', 'type': '[BatchAIError]'},
        'current_node_count': {'key': 'properties.currentNodeCount', 'type': 'int'},
        'node_state_counts': {'key': 'properties.nodeStateCounts', 'type': 'NodeStateCounts'},
    }

    def __init__(
        self,
        *,
        vm_size: Optional[str] = None,
        vm_priority: Optional[Union[str, "VmPriority"]] = None,
        scale_settings: Optional["ScaleSettings"] = None,
        virtual_machine_configuration: Optional["VirtualMachineConfiguration"] = None,
        node_setup: Optional["NodeSetup"] = None,
        user_account_settings: Optional["UserAccountSettings"] = None,
        subnet: Optional["ResourceId"] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        # Client-settable properties.
        self.vm_size = vm_size
        self.vm_priority = vm_priority
        self.scale_settings = scale_settings
        self.virtual_machine_configuration = virtual_machine_configuration
        self.node_setup = node_setup
        self.user_account_settings = user_account_settings
        self.subnet = subnet
        # Server-populated, read-only properties.
        self.creation_time = None
        self.provisioning_state = None
        self.provisioning_state_transition_time = None
        self.allocation_state = None
        self.allocation_state_transition_time = None
        self.errors = None
        self.current_node_count = None
        self.node_state_counts = None
599
600
class ClusterCreateParameters(msrest.serialization.Model):
    """Parameters for the Cluster creation operation.

    :param vm_size: The size of the virtual machines in the cluster. All nodes in a cluster have
     the same VM size. For information about available VM sizes for clusters using images from the
     Virtual Machines Marketplace see Sizes for Virtual Machines (Linux). Batch AI service supports
     all Azure VM sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS,
     and STANDARD_DSV2 series).
    :type vm_size: str
    :param vm_priority: VM priority. Allowed values are: dedicated (default) and lowpriority.
     Possible values include: "dedicated", "lowpriority".
    :type vm_priority: str or ~batch_ai.models.VmPriority
    :param scale_settings: Scale settings for the cluster. Batch AI service supports manual and
     auto scale clusters.
    :type scale_settings: ~batch_ai.models.ScaleSettings
    :param virtual_machine_configuration: OS image configuration for cluster nodes. All nodes in a
     cluster have the same OS image.
    :type virtual_machine_configuration: ~batch_ai.models.VirtualMachineConfiguration
    :param node_setup: Setup to be performed on each compute node in the cluster.
    :type node_setup: ~batch_ai.models.NodeSetup
    :param user_account_settings: Settings for an administrator user account that will be created
     on each compute node in the cluster.
    :type user_account_settings: ~batch_ai.models.UserAccountSettings
    :param subnet: Existing virtual network subnet to put the cluster nodes in. Note, if a File
     Server mount configured in node setup, the File Server's subnet will be used automatically.
    :type subnet: ~batch_ai.models.ResourceId
    """

    _attribute_map = {
        'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
        'vm_priority': {'key': 'properties.vmPriority', 'type': 'str'},
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
        'virtual_machine_configuration': {'key': 'properties.virtualMachineConfiguration', 'type': 'VirtualMachineConfiguration'},
        'node_setup': {'key': 'properties.nodeSetup', 'type': 'NodeSetup'},
        'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'},
        'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
    }

    def __init__(
        self,
        *,
        vm_size: Optional[str] = None,
        vm_priority: Optional[Union[str, "VmPriority"]] = None,
        scale_settings: Optional["ScaleSettings"] = None,
        virtual_machine_configuration: Optional["VirtualMachineConfiguration"] = None,
        node_setup: Optional["NodeSetup"] = None,
        user_account_settings: Optional["UserAccountSettings"] = None,
        subnet: Optional["ResourceId"] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.vm_size = vm_size
        self.vm_priority = vm_priority
        self.scale_settings = scale_settings
        self.virtual_machine_configuration = virtual_machine_configuration
        self.node_setup = node_setup
        self.user_account_settings = user_account_settings
        self.subnet = subnet
659
660
class ClusterListResult(msrest.serialization.Model):
    """Values returned by the List Clusters operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The collection of returned Clusters.
    :vartype value: list[~batch_ai.models.Cluster]
    :ivar next_link: The continuation token.
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Cluster]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        # All attributes are read-only and filled in by deserialization.
        self.value = None
        self.next_link = None
689
690
class ClustersListByWorkspaceOptions(msrest.serialization.Model):
    """Parameter group.

    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     files can be returned.
    :type max_results: int
    """

    # The service accepts between 1 and 1000 items per page.
    _validation = {
        'max_results': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'max_results': {'key': 'maxResults', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        max_results: Optional[int] = 1000,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.max_results = max_results
715
716
class ClusterUpdateParameters(msrest.serialization.Model):
    """Parameters for the Cluster update operation.

    :param scale_settings: Desired scale settings for the cluster. Batch AI service supports
     manual and auto scale clusters.
    :type scale_settings: ~batch_ai.models.ScaleSettings
    """

    _attribute_map = {
        'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
    }

    def __init__(
        self,
        *,
        scale_settings: Optional["ScaleSettings"] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(**kwargs)
        self.scale_settings = scale_settings
737
738
class CNTKsettings(msrest.serialization.Model):
    """Settings for a CNTK (aka Microsoft Cognitive Toolkit) job.

    :param language_type: Language used to launch the CNTK (aka Microsoft Cognitive Toolkit)
     job. Valid values are 'BrainScript' or 'Python'.
    :type language_type: str
    :param config_file_path: Path of the BrainScript config file. May only be specified when
     languageType is 'BrainScript'.
    :type config_file_path: str
    :param python_script_file_path: Python script to execute. May only be specified when
     languageType is 'Python'.
    :type python_script_file_path: str
    :param python_interpreter_path: Path to the Python interpreter. May only be specified when
     languageType is 'Python'.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments passed to the python script or cntk
     executable.
    :type command_line_args: str
    :param process_count: Number of processes launched for the job execution; defaults to the
     value of the nodeCount property.
    :type process_count: int
    """

    _attribute_map = {
        "language_type": {"key": "languageType", "type": "str"},
        "config_file_path": {"key": "configFilePath", "type": "str"},
        "python_script_file_path": {"key": "pythonScriptFilePath", "type": "str"},
        "python_interpreter_path": {"key": "pythonInterpreterPath", "type": "str"},
        "command_line_args": {"key": "commandLineArgs", "type": "str"},
        "process_count": {"key": "processCount", "type": "int"},
    }

    def __init__(
        self,
        *,
        language_type: Optional[str] = None,
        config_file_path: Optional[str] = None,
        python_script_file_path: Optional[str] = None,
        python_interpreter_path: Optional[str] = None,
        command_line_args: Optional[str] = None,
        process_count: Optional[int] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.language_type = language_type
        self.config_file_path = config_file_path
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
        self.process_count = process_count
789
790
class ContainerSettings(msrest.serialization.Model):
    """Settings for the docker container in which a job runs.

    All required parameters must be populated in order to send to Azure.

    :param image_source_registry: Required. Information about the docker image and the docker
     registry to download the container from.
    :type image_source_registry: ~batch_ai.models.ImageSourceRegistry
    :param shm_size: Size of /dev/shm; see the docker documentation for supported argument
     formats.
    :type shm_size: str
    """

    _validation = {
        "image_source_registry": {"required": True},
    }

    _attribute_map = {
        "image_source_registry": {"key": "imageSourceRegistry", "type": "ImageSourceRegistry"},
        "shm_size": {"key": "shmSize", "type": "str"},
    }

    def __init__(self, *, image_source_registry: "ImageSourceRegistry", shm_size: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.image_source_registry = image_source_registry
        self.shm_size = shm_size
823
824
class CustomMpiSettings(msrest.serialization.Model):
    """Settings for a custom MPI job.

    All required parameters must be populated in order to send to Azure.

    :param command_line: Required. Command line executed by the mpi runtime on each compute
     node.
    :type command_line: str
    :param process_count: Number of processes launched for the job execution; defaults to the
     value of the nodeCount property.
    :type process_count: int
    """

    _validation = {
        "command_line": {"required": True},
    }

    _attribute_map = {
        "command_line": {"key": "commandLine", "type": "str"},
        "process_count": {"key": "processCount", "type": "int"},
    }

    def __init__(self, *, command_line: str, process_count: Optional[int] = None, **kwargs):
        super().__init__(**kwargs)
        self.command_line = command_line
        self.process_count = process_count
857
858
class CustomToolkitSettings(msrest.serialization.Model):
    """Settings for a custom tool kit job.

    :param command_line: The command line to execute on the master node.
    :type command_line: str
    """

    _attribute_map = {
        "command_line": {"key": "commandLine", "type": "str"},
    }

    def __init__(self, *, command_line: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.command_line = command_line
878
879
class DataDisks(msrest.serialization.Model):
    """Settings for the data disks attached to a File Server.

    All required parameters must be populated in order to send to Azure.

    :param disk_size_in_gb: Required. Disk size in GB for the blank data disks.
    :type disk_size_in_gb: int
    :param caching_type: Caching type for the disks: none (default), readonly or readwrite.
     Caching type can be set only for VM sizes supporting premium storage. Possible values
     include: "none", "readonly", "readwrite". Default value: "none".
    :type caching_type: str or ~batch_ai.models.CachingType
    :param disk_count: Required. Number of data disks attached to the File Server. When
     multiple disks are attached, they are configured in RAID level 0.
    :type disk_count: int
    :param storage_account_type: Required. Type of storage account used for the disk:
     Standard_LRS or Premium_LRS. The premium storage account type can only be used with VM
     sizes supporting premium storage. Possible values include: "Standard_LRS", "Premium_LRS".
    :type storage_account_type: str or ~batch_ai.models.StorageAccountType
    """

    _validation = {
        "disk_size_in_gb": {"required": True},
        "disk_count": {"required": True},
        "storage_account_type": {"required": True},
    }

    _attribute_map = {
        "disk_size_in_gb": {"key": "diskSizeInGB", "type": "int"},
        "caching_type": {"key": "cachingType", "type": "str"},
        "disk_count": {"key": "diskCount", "type": "int"},
        "storage_account_type": {"key": "storageAccountType", "type": "str"},
    }

    def __init__(
        self,
        *,
        disk_size_in_gb: int,
        disk_count: int,
        storage_account_type: Union[str, "StorageAccountType"],
        caching_type: Optional[Union[str, "CachingType"]] = "none",
        **kwargs
    ):
        super().__init__(**kwargs)
        self.disk_size_in_gb = disk_size_in_gb
        self.caching_type = caching_type
        self.disk_count = disk_count
        self.storage_account_type = storage_account_type
927
928
class EnvironmentVariable(msrest.serialization.Model):
    """Definition of an environment variable.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of the environment variable.
    :type name: str
    :param value: Required. Value of the environment variable.
    :type value: str
    """

    _validation = {
        "name": {"required": True},
        "value": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, *, name: str, value: str, **kwargs):
        super().__init__(**kwargs)
        self.name = name
        self.value = value
960
961
class EnvironmentVariableWithSecretValue(msrest.serialization.Model):
    """Definition of an environment variable whose value is a secret.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of the environment variable that will hold the secret value.
    :type name: str
    :param value: Value of the environment variable. This value is never reported back by
     Batch AI.
    :type value: str
    :param value_secret_reference: KeyVault store and secret containing the value for the
     environment variable. One of value or valueSecretReference must be provided.
    :type value_secret_reference: ~batch_ai.models.KeyVaultSecretReference
    """

    _validation = {
        "name": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "value": {"key": "value", "type": "str"},
        "value_secret_reference": {"key": "valueSecretReference", "type": "KeyVaultSecretReference"},
    }

    def __init__(
        self,
        *,
        name: str,
        value: Optional[str] = None,
        value_secret_reference: Optional["KeyVaultSecretReference"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.name = name
        self.value = value
        self.value_secret_reference = value_secret_reference
999
1000
class Experiment(ProxyResource):
    """Information about an Experiment.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :ivar creation_time: Time at which the Experiment was created.
    :vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: Provisioned state of the experiment. Possible values include:
     "creating", "succeeded", "failed", "deleting".
    :vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
    :ivar provisioning_state_transition_time: Time at which the experiment entered its
     current provisioning state.
    :vartype provisioning_state_transition_time: ~datetime.datetime
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "creation_time": {"readonly": True},
        "provisioning_state": {"readonly": True},
        "provisioning_state_transition_time": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "creation_time": {"key": "properties.creationTime", "type": "iso-8601"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "provisioning_state_transition_time": {"key": "properties.provisioningStateTransitionTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated properties; always None on the client side.
        self.creation_time = None
        self.provisioning_state = None
        self.provisioning_state_transition_time = None
1048
1049
class ExperimentListResult(msrest.serialization.Model):
    """Values returned by the List operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: Collection of returned experiments.
    :vartype value: list[~batch_ai.models.Experiment]
    :ivar next_link: Continuation token.
    :vartype next_link: str
    """

    _validation = {
        "value": {"readonly": True},
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[Experiment]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Both properties are server-populated.
        self.value = None
        self.next_link = None
1078
1079
class ExperimentsListByWorkspaceOptions(msrest.serialization.Model):
    """Parameter group.

    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     experiments can be returned.
    :type max_results: int
    """

    _validation = {
        'max_results': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'max_results': {'key': 'maxResults', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        max_results: Optional[int] = 1000,
        **kwargs
    ):
        super(ExperimentsListByWorkspaceOptions, self).__init__(**kwargs)
        self.max_results = max_results
1104
1105
class File(msrest.serialization.Model):
    """Properties of the file or directory.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: Name of the file.
    :vartype name: str
    :ivar file_type: Type of the file. Possible values are file and directory. Possible values
     include: "file", "directory".
    :vartype file_type: str or ~batch_ai.models.FileType
    :ivar download_url: URL to download the corresponding file. The downloadUrl is not returned for
     directories.
    :vartype download_url: str
    :ivar last_modified: The time at which the file was last modified.
    :vartype last_modified: ~datetime.datetime
    :ivar content_length: The size of the file.
    :vartype content_length: long
    """

    _validation = {
        'name': {'readonly': True},
        'file_type': {'readonly': True},
        'download_url': {'readonly': True},
        'last_modified': {'readonly': True},
        'content_length': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'file_type': {'key': 'fileType', 'type': 'str'},
        'download_url': {'key': 'downloadUrl', 'type': 'str'},
        'last_modified': {'key': 'properties.lastModified', 'type': 'iso-8601'},
        'content_length': {'key': 'properties.contentLength', 'type': 'long'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(File, self).__init__(**kwargs)
        self.name = None
        self.file_type = None
        self.download_url = None
        self.last_modified = None
        self.content_length = None
1151
1152
class FileListResult(msrest.serialization.Model):
    """Values returned by the List operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: Collection of returned job directories and files.
    :vartype value: list[~batch_ai.models.File]
    :ivar next_link: Continuation token.
    :vartype next_link: str
    """

    _validation = {
        "value": {"readonly": True},
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[File]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Both properties are server-populated.
        self.value = None
        self.next_link = None
1181
1182
class FileServer(ProxyResource):
    """Information about a File Server.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :param vm_size: VM size of the File Server.
    :type vm_size: str
    :param ssh_configuration: SSH configuration for accessing the File Server node.
    :type ssh_configuration: ~batch_ai.models.SshConfiguration
    :param data_disks: Information about the disks attached to the File Server VM.
    :type data_disks: ~batch_ai.models.DataDisks
    :param subnet: File Server virtual network subnet resource ID.
    :type subnet: ~batch_ai.models.ResourceId
    :ivar mount_settings: File Server mount settings.
    :vartype mount_settings: ~batch_ai.models.MountSettings
    :ivar provisioning_state_transition_time: Time when the provisioning state was changed.
    :vartype provisioning_state_transition_time: ~datetime.datetime
    :ivar creation_time: Time when the FileServer was created.
    :vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: Provisioning state of the File Server. Possible values: creating -
     The File Server is getting created; updating - The File Server creation has been accepted and
     it is getting updated; deleting - The user has requested that the File Server be deleted, and
     it is in the process of being deleted; failed - The File Server creation has failed with the
     specified error code. Details about the error code are specified in the message field;
     succeeded - The File Server creation has succeeded. Possible values include: "creating",
     "updating", "deleting", "succeeded", "failed".
    :vartype provisioning_state: str or ~batch_ai.models.FileServerProvisioningState
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "mount_settings": {"readonly": True},
        "provisioning_state_transition_time": {"readonly": True},
        "creation_time": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "vm_size": {"key": "properties.vmSize", "type": "str"},
        "ssh_configuration": {"key": "properties.sshConfiguration", "type": "SshConfiguration"},
        "data_disks": {"key": "properties.dataDisks", "type": "DataDisks"},
        "subnet": {"key": "properties.subnet", "type": "ResourceId"},
        "mount_settings": {"key": "properties.mountSettings", "type": "MountSettings"},
        "provisioning_state_transition_time": {"key": "properties.provisioningStateTransitionTime", "type": "iso-8601"},
        "creation_time": {"key": "properties.creationTime", "type": "iso-8601"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(
        self,
        *,
        vm_size: Optional[str] = None,
        ssh_configuration: Optional["SshConfiguration"] = None,
        data_disks: Optional["DataDisks"] = None,
        subnet: Optional["ResourceId"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Client-settable properties.
        self.vm_size = vm_size
        self.ssh_configuration = ssh_configuration
        self.data_disks = data_disks
        self.subnet = subnet
        # Server-populated properties; always None on the client side.
        self.mount_settings = None
        self.provisioning_state_transition_time = None
        self.creation_time = None
        self.provisioning_state = None
1260
1261
class FileServerCreateParameters(msrest.serialization.Model):
    """Parameters supplied when creating a File Server.

    :param vm_size: Size of the virtual machine for the File Server. For information about
     available VM sizes from the Virtual Machines Marketplace, see Sizes for Virtual Machines
     (Linux).
    :type vm_size: str
    :param ssh_configuration: SSH configuration for the File Server node.
    :type ssh_configuration: ~batch_ai.models.SshConfiguration
    :param data_disks: Settings for the data disks which will be created for the File Server.
    :type data_disks: ~batch_ai.models.DataDisks
    :param subnet: Identifier of an existing virtual network subnet to put the File Server in.
     If not provided, a new virtual network and subnet will be created.
    :type subnet: ~batch_ai.models.ResourceId
    """

    _attribute_map = {
        "vm_size": {"key": "properties.vmSize", "type": "str"},
        "ssh_configuration": {"key": "properties.sshConfiguration", "type": "SshConfiguration"},
        "data_disks": {"key": "properties.dataDisks", "type": "DataDisks"},
        "subnet": {"key": "properties.subnet", "type": "ResourceId"},
    }

    def __init__(
        self,
        *,
        vm_size: Optional[str] = None,
        ssh_configuration: Optional["SshConfiguration"] = None,
        data_disks: Optional["DataDisks"] = None,
        subnet: Optional["ResourceId"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.vm_size = vm_size
        self.ssh_configuration = ssh_configuration
        self.data_disks = data_disks
        self.subnet = subnet
1299
1300
class FileServerListResult(msrest.serialization.Model):
    """Values returned by the File Server List operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param value: Collection of File Servers.
    :type value: list[~batch_ai.models.FileServer]
    :ivar next_link: Continuation token.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[FileServer]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["FileServer"]] = None, **kwargs):
        super().__init__(**kwargs)
        self.value = value
        # Server-populated; always None on the client side.
        self.next_link = None
1330
1331
class FileServerReference(msrest.serialization.Model):
    """Mounting configuration for a File Server.

    All required parameters must be populated in order to send to Azure.

    :param file_server: Required. Resource ID of the existing File Server to be mounted.
    :type file_server: ~batch_ai.models.ResourceId
    :param source_directory: File Server directory that needs to be mounted. If this property
     is not specified, the entire File Server will be mounted.
    :type source_directory: str
    :param relative_mount_path: Required. Relative path on the compute node where the File
     Server will be mounted. Note that all cluster level file servers will be mounted under
     $AZ_BATCHAI_MOUNT_ROOT location and all job level file servers will be mounted under
     $AZ_BATCHAI_JOB_MOUNT_ROOT.
    :type relative_mount_path: str
    :param mount_options: Mount options to be passed to the mount command.
    :type mount_options: str
    """

    _validation = {
        "file_server": {"required": True},
        "relative_mount_path": {"required": True},
    }

    _attribute_map = {
        "file_server": {"key": "fileServer", "type": "ResourceId"},
        "source_directory": {"key": "sourceDirectory", "type": "str"},
        "relative_mount_path": {"key": "relativeMountPath", "type": "str"},
        "mount_options": {"key": "mountOptions", "type": "str"},
    }

    def __init__(
        self,
        *,
        file_server: "ResourceId",
        relative_mount_path: str,
        source_directory: Optional[str] = None,
        mount_options: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.file_server = file_server
        self.source_directory = source_directory
        self.relative_mount_path = relative_mount_path
        self.mount_options = mount_options
1377
1378
class FileServersListByWorkspaceOptions(msrest.serialization.Model):
    """Parameter group.

    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     file servers can be returned.
    :type max_results: int
    """

    _validation = {
        'max_results': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'max_results': {'key': 'maxResults', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        max_results: Optional[int] = 1000,
        **kwargs
    ):
        super(FileServersListByWorkspaceOptions, self).__init__(**kwargs)
        self.max_results = max_results
1403
1404
class HorovodSettings(msrest.serialization.Model):
    """Settings for a Horovod job.

    All required parameters must be populated in order to send to Azure.

    :param python_script_file_path: Required. The python script to execute.
    :type python_script_file_path: str
    :param python_interpreter_path: Path to the Python interpreter.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments passed to the python script.
    :type command_line_args: str
    :param process_count: Number of processes launched for the job execution; defaults to the
     value of the nodeCount property.
    :type process_count: int
    """

    _validation = {
        "python_script_file_path": {"required": True},
    }

    _attribute_map = {
        "python_script_file_path": {"key": "pythonScriptFilePath", "type": "str"},
        "python_interpreter_path": {"key": "pythonInterpreterPath", "type": "str"},
        "command_line_args": {"key": "commandLineArgs", "type": "str"},
        "process_count": {"key": "processCount", "type": "int"},
    }

    def __init__(
        self,
        *,
        python_script_file_path: str,
        python_interpreter_path: Optional[str] = None,
        command_line_args: Optional[str] = None,
        process_count: Optional[int] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
        self.process_count = process_count
1446
1447
class ImageReference(msrest.serialization.Model):
    """Reference to an OS image.

    All required parameters must be populated in order to send to Azure.

    :param publisher: Required. Publisher of the image.
    :type publisher: str
    :param offer: Required. Offer of the image.
    :type offer: str
    :param sku: Required. SKU of the image.
    :type sku: str
    :param version: Version of the image.
    :type version: str
    :param virtual_machine_image_id: ARM resource identifier of the virtual machine image for
     the compute nodes. This is of the form
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/images/{imageName}.
     The virtual machine image must be in the same region and subscription as the cluster. For
     information about the firewall settings for the Batch node agent to communicate with the
     Batch service see
     https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration.
     Note, you need to provide publisher, offer and sku of the base OS image of which the
     custom image has been derived from.
    :type virtual_machine_image_id: str
    """

    _validation = {
        "publisher": {"required": True},
        "offer": {"required": True},
        "sku": {"required": True},
    }

    _attribute_map = {
        "publisher": {"key": "publisher", "type": "str"},
        "offer": {"key": "offer", "type": "str"},
        "sku": {"key": "sku", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "virtual_machine_image_id": {"key": "virtualMachineImageId", "type": "str"},
    }

    def __init__(
        self,
        *,
        publisher: str,
        offer: str,
        sku: str,
        version: Optional[str] = None,
        virtual_machine_image_id: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.publisher = publisher
        self.offer = offer
        self.sku = sku
        self.version = version
        self.virtual_machine_image_id = virtual_machine_image_id
1503
1504
class ImageSourceRegistry(msrest.serialization.Model):
    """Information about the docker image for the job.

    All required parameters must be populated in order to send to Azure.

    :param server_url: URL for the image repository.
    :type server_url: str
    :param image: Required. Name of the image in the image repository.
    :type image: str
    :param credentials: Credentials to access the private docker repository.
    :type credentials: ~batch_ai.models.PrivateRegistryCredentials
    """

    _validation = {
        "image": {"required": True},
    }

    _attribute_map = {
        "server_url": {"key": "serverUrl", "type": "str"},
        "image": {"key": "image", "type": "str"},
        "credentials": {"key": "credentials", "type": "PrivateRegistryCredentials"},
    }

    def __init__(
        self,
        *,
        image: str,
        server_url: Optional[str] = None,
        credentials: Optional["PrivateRegistryCredentials"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.server_url = server_url
        self.image = image
        self.credentials = credentials
1540
1541
class InputDirectory(msrest.serialization.Model):
    """Input directory for the job.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. The ID for the input directory. The job can use the
     ``AZ_BATCHAI_INPUT_<id>`` environment variable to find the directory path, where
     ``<id>`` is the value of the id attribute.
    :type id: str
    :param path: Required. The path to the input directory.
    :type path: str
    """

    _validation = {
        "id": {"required": True},
        "path": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "path": {"key": "path", "type": "str"},
    }

    def __init__(self, *, id: str, path: str, **kwargs):
        super().__init__(**kwargs)
        self.id = id
        self.path = path
1575
1576
class Job(ProxyResource):
    """Information about a Job.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :param scheduling_priority: Scheduling priority associated with the job. Possible values
     include: "low", "normal", "high".
    :type scheduling_priority: str or ~batch_ai.models.JobPriority
    :param cluster: Resource ID of the cluster associated with the job.
    :type cluster: ~batch_ai.models.ResourceId
    :param mount_volumes: Collection of mount volumes available to the job during execution. These
     volumes are mounted before the job execution and unmounted after the job completion. The
     volumes are mounted at location specified by $AZ_BATCHAI_JOB_MOUNT_ROOT environment variable.
    :type mount_volumes: ~batch_ai.models.MountVolumes
    :param node_count: The job will be gang scheduled on that many compute nodes.
    :type node_count: int
    :param container_settings: If the container was downloaded as part of cluster setup then the
     same container image will be used. If not provided, the job will run on the VM.
    :type container_settings: ~batch_ai.models.ContainerSettings
    :param tool_type: Possible values are: cntk, tensorflow, caffe, caffe2, chainer, pytorch,
     custom, custommpi, horovod. Possible values include: "cntk", "tensorflow", "caffe", "caffe2",
     "chainer", "horovod", "custommpi", "custom".
    :type tool_type: str or ~batch_ai.models.ToolType
    :param cntk_settings: CNTK (aka Microsoft Cognitive Toolkit) job settings.
    :type cntk_settings: ~batch_ai.models.CNTKsettings
    :param py_torch_settings: pyTorch job settings.
    :type py_torch_settings: ~batch_ai.models.PyTorchSettings
    :param tensor_flow_settings: TensorFlow job settings.
    :type tensor_flow_settings: ~batch_ai.models.TensorFlowSettings
    :param caffe_settings: Caffe job settings.
    :type caffe_settings: ~batch_ai.models.CaffeSettings
    :param caffe2_settings: Caffe2 job settings.
    :type caffe2_settings: ~batch_ai.models.Caffe2Settings
    :param chainer_settings: Chainer job settings.
    :type chainer_settings: ~batch_ai.models.ChainerSettings
    :param custom_toolkit_settings: Custom tool kit job settings.
    :type custom_toolkit_settings: ~batch_ai.models.CustomToolkitSettings
    :param custom_mpi_settings: Custom MPI job settings.
    :type custom_mpi_settings: ~batch_ai.models.CustomMpiSettings
    :param horovod_settings: Specifies the settings for Horovod job.
    :type horovod_settings: ~batch_ai.models.HorovodSettings
    :param job_preparation: The specified actions will run on all the nodes that are part of the
     job.
    :type job_preparation: ~batch_ai.models.JobPreparation
    :ivar job_output_directory_path_segment: A segment of job's output directories path created by
     Batch AI. Batch AI creates job's output directories under an unique path to avoid conflicts
     between jobs. This value contains a path segment generated by Batch AI to make the path unique
     and can be used to find the output directory on the node or mounted filesystem.
    :vartype job_output_directory_path_segment: str
    :param std_out_err_path_prefix: The path where the Batch AI service stores stdout, stderror and
     execution log of the job.
    :type std_out_err_path_prefix: str
    :param input_directories: A list of input directories for the job.
    :type input_directories: list[~batch_ai.models.InputDirectory]
    :param output_directories: A list of output directories for the job.
    :type output_directories: list[~batch_ai.models.OutputDirectory]
    :param environment_variables: A collection of user defined environment variables to be setup
     for the job.
    :type environment_variables: list[~batch_ai.models.EnvironmentVariable]
    :param secrets: A collection of user defined environment variables with secret values to be
     setup for the job. Server will never report values of these variables back.
    :type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
    :param constraints: Constraints associated with the Job.
    :type constraints: ~batch_ai.models.JobPropertiesConstraints
    :ivar creation_time: The creation time of the job.
    :vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: The provisioned state of the Batch AI job. Possible values include:
     "creating", "succeeded", "failed", "deleting".
    :vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
    :ivar provisioning_state_transition_time: The time at which the job entered its current
     provisioning state.
    :vartype provisioning_state_transition_time: ~datetime.datetime
    :ivar execution_state: The current state of the job. Possible values are: queued - The job is
     queued and able to run. A job enters this state when it is created, or when it is awaiting a
     retry after a failed run. running - The job is running on a compute cluster. This includes
     job-level preparation such as downloading resource files or set up container specified on the
     job - it does not necessarily mean that the job command line has started executing. terminating
     - The job is terminated by the user, the terminate operation is in progress. succeeded - The
     job has completed running successfully and exited with exit code 0. failed - The job has
     finished unsuccessfully (failed with a non-zero exit code) and has exhausted its retry limit. A
     job is also marked as failed if an error occurred launching the job. Possible values include:
     "queued", "running", "terminating", "succeeded", "failed".
    :vartype execution_state: str or ~batch_ai.models.ExecutionState
    :ivar execution_state_transition_time: The time at which the job entered its current execution
     state.
    :vartype execution_state_transition_time: ~datetime.datetime
    :param execution_info: Information about the execution of a job.
    :type execution_info: ~batch_ai.models.JobPropertiesExecutionInfo
    """

    # Attributes marked 'readonly' are populated by the service only; msrest
    # rejects them as user input during serialization.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'job_output_directory_path_segment': {'readonly': True},
        'creation_time': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'provisioning_state_transition_time': {'readonly': True},
        'execution_state': {'readonly': True},
        'execution_state_transition_time': {'readonly': True},
    }

    # Maps Python attribute names to the REST payload's JSON keys; the dotted
    # 'properties.*' keys tell msrest to nest these fields under 'properties'.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'},
        'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'},
        'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'},
        'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
        'container_settings': {'key': 'properties.containerSettings', 'type': 'ContainerSettings'},
        'tool_type': {'key': 'properties.toolType', 'type': 'str'},
        'cntk_settings': {'key': 'properties.cntkSettings', 'type': 'CNTKsettings'},
        'py_torch_settings': {'key': 'properties.pyTorchSettings', 'type': 'PyTorchSettings'},
        'tensor_flow_settings': {'key': 'properties.tensorFlowSettings', 'type': 'TensorFlowSettings'},
        'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'},
        'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'},
        'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'},
        'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'},
        'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'},
        'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'},
        'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'},
        'job_output_directory_path_segment': {'key': 'properties.jobOutputDirectoryPathSegment', 'type': 'str'},
        'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'},
        'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'},
        'output_directories': {'key': 'properties.outputDirectories', 'type': '[OutputDirectory]'},
        'environment_variables': {'key': 'properties.environmentVariables', 'type': '[EnvironmentVariable]'},
        'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
        'constraints': {'key': 'properties.constraints', 'type': 'JobPropertiesConstraints'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
        'execution_state': {'key': 'properties.executionState', 'type': 'str'},
        'execution_state_transition_time': {'key': 'properties.executionStateTransitionTime', 'type': 'iso-8601'},
        'execution_info': {'key': 'properties.executionInfo', 'type': 'JobPropertiesExecutionInfo'},
    }

    def __init__(
        self,
        *,
        scheduling_priority: Optional[Union[str, "JobPriority"]] = None,
        cluster: Optional["ResourceId"] = None,
        mount_volumes: Optional["MountVolumes"] = None,
        node_count: Optional[int] = None,
        container_settings: Optional["ContainerSettings"] = None,
        tool_type: Optional[Union[str, "ToolType"]] = None,
        cntk_settings: Optional["CNTKsettings"] = None,
        py_torch_settings: Optional["PyTorchSettings"] = None,
        tensor_flow_settings: Optional["TensorFlowSettings"] = None,
        caffe_settings: Optional["CaffeSettings"] = None,
        caffe2_settings: Optional["Caffe2Settings"] = None,
        chainer_settings: Optional["ChainerSettings"] = None,
        custom_toolkit_settings: Optional["CustomToolkitSettings"] = None,
        custom_mpi_settings: Optional["CustomMpiSettings"] = None,
        horovod_settings: Optional["HorovodSettings"] = None,
        job_preparation: Optional["JobPreparation"] = None,
        std_out_err_path_prefix: Optional[str] = None,
        input_directories: Optional[List["InputDirectory"]] = None,
        output_directories: Optional[List["OutputDirectory"]] = None,
        environment_variables: Optional[List["EnvironmentVariable"]] = None,
        secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
        constraints: Optional["JobPropertiesConstraints"] = None,
        execution_info: Optional["JobPropertiesExecutionInfo"] = None,
        **kwargs
    ):
        super(Job, self).__init__(**kwargs)
        self.scheduling_priority = scheduling_priority
        self.cluster = cluster
        self.mount_volumes = mount_volumes
        self.node_count = node_count
        self.container_settings = container_settings
        self.tool_type = tool_type
        self.cntk_settings = cntk_settings
        self.py_torch_settings = py_torch_settings
        self.tensor_flow_settings = tensor_flow_settings
        self.caffe_settings = caffe_settings
        self.caffe2_settings = caffe2_settings
        self.chainer_settings = chainer_settings
        self.custom_toolkit_settings = custom_toolkit_settings
        self.custom_mpi_settings = custom_mpi_settings
        self.horovod_settings = horovod_settings
        self.job_preparation = job_preparation
        # Read-only (server-populated) attribute; always starts as None.
        self.job_output_directory_path_segment = None
        self.std_out_err_path_prefix = std_out_err_path_prefix
        self.input_directories = input_directories
        self.output_directories = output_directories
        self.environment_variables = environment_variables
        self.secrets = secrets
        self.constraints = constraints
        # Remaining read-only attributes are filled in by the service on
        # deserialization of a response; None until then.
        self.creation_time = None
        self.provisioning_state = None
        self.provisioning_state_transition_time = None
        self.execution_state = None
        self.execution_state_transition_time = None
        self.execution_info = execution_info
1778
1779
class JobBasePropertiesConstraints(msrest.serialization.Model):
    """Constraints associated with the Job.

    :param max_wall_clock_time: Max time the job can run. Default value: 1 week.
    :type max_wall_clock_time: ~datetime.timedelta
    """

    _attribute_map = {
        'max_wall_clock_time': {'key': 'maxWallClockTime', 'type': 'duration'},
    }

    def __init__(
        self,
        *,
        # The generated default was the string "7.00:00:00", which contradicts
        # the timedelta annotation and is not an ISO-8601 duration, so msrest's
        # 'duration' serializer cannot handle it; use the equivalent timedelta.
        max_wall_clock_time: Optional[datetime.timedelta] = datetime.timedelta(days=7),
        **kwargs
    ):
        super(JobBasePropertiesConstraints, self).__init__(**kwargs)
        self.max_wall_clock_time = max_wall_clock_time
1799
1800
class JobCreateParameters(msrest.serialization.Model):
    """Job creation parameters.

    :param scheduling_priority: Scheduling priority associated with the job. Possible values: low,
     normal, high. Possible values include: "low", "normal", "high".
    :type scheduling_priority: str or ~batch_ai.models.JobPriority
    :param cluster: Resource ID of the cluster on which this job will run.
    :type cluster: ~batch_ai.models.ResourceId
    :param mount_volumes: Information on mount volumes to be used by the job. These volumes will be
     mounted before the job execution and will be unmounted after the job completion. The volumes
     will be mounted at location specified by $AZ_BATCHAI_JOB_MOUNT_ROOT environment variable.
    :type mount_volumes: ~batch_ai.models.MountVolumes
    :param node_count: Number of compute nodes to run the job on. The job will be gang scheduled on
     that many compute nodes.
    :type node_count: int
    :param container_settings: Docker container settings for the job. If not provided, the job will
     run directly on the node.
    :type container_settings: ~batch_ai.models.ContainerSettings
    :param cntk_settings: Settings for CNTK (aka Microsoft Cognitive Toolkit) job.
    :type cntk_settings: ~batch_ai.models.CNTKsettings
    :param py_torch_settings: Settings for pyTorch job.
    :type py_torch_settings: ~batch_ai.models.PyTorchSettings
    :param tensor_flow_settings: Settings for Tensor Flow job.
    :type tensor_flow_settings: ~batch_ai.models.TensorFlowSettings
    :param caffe_settings: Settings for Caffe job.
    :type caffe_settings: ~batch_ai.models.CaffeSettings
    :param caffe2_settings: Settings for Caffe2 job.
    :type caffe2_settings: ~batch_ai.models.Caffe2Settings
    :param chainer_settings: Settings for Chainer job.
    :type chainer_settings: ~batch_ai.models.ChainerSettings
    :param custom_toolkit_settings: Settings for custom tool kit job.
    :type custom_toolkit_settings: ~batch_ai.models.CustomToolkitSettings
    :param custom_mpi_settings: Settings for custom MPI job.
    :type custom_mpi_settings: ~batch_ai.models.CustomMpiSettings
    :param horovod_settings: Settings for Horovod job.
    :type horovod_settings: ~batch_ai.models.HorovodSettings
    :param job_preparation: A command line to be executed on each node allocated for the job before
     tool kit is launched.
    :type job_preparation: ~batch_ai.models.JobPreparation
    :param std_out_err_path_prefix: The path where the Batch AI service will store stdout, stderror
     and execution log of the job.
    :type std_out_err_path_prefix: str
    :param input_directories: A list of input directories for the job.
    :type input_directories: list[~batch_ai.models.InputDirectory]
    :param output_directories: A list of output directories for the job.
    :type output_directories: list[~batch_ai.models.OutputDirectory]
    :param environment_variables: A list of user defined environment variables which will be setup
     for the job.
    :type environment_variables: list[~batch_ai.models.EnvironmentVariable]
    :param secrets: A list of user defined environment variables with secret values which will be
     setup for the job. Server will never report values of these variables back.
    :type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
    :param constraints: Constraints associated with the Job.
    :type constraints: ~batch_ai.models.JobBasePropertiesConstraints
    """

    # Maps Python attribute names to the REST payload's JSON keys; the dotted
    # 'properties.*' keys tell msrest to nest these fields under 'properties'.
    _attribute_map = {
        'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'},
        'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'},
        'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'},
        'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
        'container_settings': {'key': 'properties.containerSettings', 'type': 'ContainerSettings'},
        'cntk_settings': {'key': 'properties.cntkSettings', 'type': 'CNTKsettings'},
        'py_torch_settings': {'key': 'properties.pyTorchSettings', 'type': 'PyTorchSettings'},
        'tensor_flow_settings': {'key': 'properties.tensorFlowSettings', 'type': 'TensorFlowSettings'},
        'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'},
        'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'},
        'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'},
        'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'},
        'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'},
        'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'},
        'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'},
        'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'},
        'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'},
        'output_directories': {'key': 'properties.outputDirectories', 'type': '[OutputDirectory]'},
        'environment_variables': {'key': 'properties.environmentVariables', 'type': '[EnvironmentVariable]'},
        'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
        'constraints': {'key': 'properties.constraints', 'type': 'JobBasePropertiesConstraints'},
    }

    def __init__(
        self,
        *,
        scheduling_priority: Optional[Union[str, "JobPriority"]] = None,
        cluster: Optional["ResourceId"] = None,
        mount_volumes: Optional["MountVolumes"] = None,
        node_count: Optional[int] = None,
        container_settings: Optional["ContainerSettings"] = None,
        cntk_settings: Optional["CNTKsettings"] = None,
        py_torch_settings: Optional["PyTorchSettings"] = None,
        tensor_flow_settings: Optional["TensorFlowSettings"] = None,
        caffe_settings: Optional["CaffeSettings"] = None,
        caffe2_settings: Optional["Caffe2Settings"] = None,
        chainer_settings: Optional["ChainerSettings"] = None,
        custom_toolkit_settings: Optional["CustomToolkitSettings"] = None,
        custom_mpi_settings: Optional["CustomMpiSettings"] = None,
        horovod_settings: Optional["HorovodSettings"] = None,
        job_preparation: Optional["JobPreparation"] = None,
        std_out_err_path_prefix: Optional[str] = None,
        input_directories: Optional[List["InputDirectory"]] = None,
        output_directories: Optional[List["OutputDirectory"]] = None,
        environment_variables: Optional[List["EnvironmentVariable"]] = None,
        secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
        constraints: Optional["JobBasePropertiesConstraints"] = None,
        **kwargs
    ):
        # All parameters are optional at construction time; server-side
        # validation decides which combinations are acceptable.
        super(JobCreateParameters, self).__init__(**kwargs)
        self.scheduling_priority = scheduling_priority
        self.cluster = cluster
        self.mount_volumes = mount_volumes
        self.node_count = node_count
        self.container_settings = container_settings
        self.cntk_settings = cntk_settings
        self.py_torch_settings = py_torch_settings
        self.tensor_flow_settings = tensor_flow_settings
        self.caffe_settings = caffe_settings
        self.caffe2_settings = caffe2_settings
        self.chainer_settings = chainer_settings
        self.custom_toolkit_settings = custom_toolkit_settings
        self.custom_mpi_settings = custom_mpi_settings
        self.horovod_settings = horovod_settings
        self.job_preparation = job_preparation
        self.std_out_err_path_prefix = std_out_err_path_prefix
        self.input_directories = input_directories
        self.output_directories = output_directories
        self.environment_variables = environment_variables
        self.secrets = secrets
        self.constraints = constraints
1929
1930
class JobListResult(msrest.serialization.Model):
    """Result page returned by the List jobs operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The collection of jobs.
    :vartype value: list[~batch_ai.models.Job]
    :ivar next_link: The continuation token.
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Job]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        # Every field is read-only: the service fills them in when a response
        # is deserialized, so they all start out as None.
        super().__init__(**kwargs)
        self.next_link = None
        self.value = None
1959
1960
class JobPreparation(msrest.serialization.Model):
    """Settings for a job preparation step.

    All required parameters must be populated in order to send to Azure.

    :param command_line: Required. The command line to execute. If containerSettings is specified
     on the job, this commandLine will be executed in the same container as job. Otherwise it will
     be executed on the node.
    :type command_line: str
    """

    _validation = {
        'command_line': {'required': True},
    }

    _attribute_map = {
        'command_line': {'key': 'commandLine', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        command_line: str,
        **kwargs
    ):
        # The command line is the only field and it is mandatory.
        super().__init__(**kwargs)
        self.command_line = command_line
1988
1989
class JobPropertiesConstraints(msrest.serialization.Model):
    """Constraints associated with the Job.

    :param max_wall_clock_time: Max time the job can run. Default value: 1 week.
    :type max_wall_clock_time: ~datetime.timedelta
    """

    _attribute_map = {
        'max_wall_clock_time': {'key': 'maxWallClockTime', 'type': 'duration'},
    }

    def __init__(
        self,
        *,
        # The generated default was the string "7.00:00:00", which contradicts
        # the timedelta annotation and is not an ISO-8601 duration, so msrest's
        # 'duration' serializer cannot handle it; use the equivalent timedelta.
        max_wall_clock_time: Optional[datetime.timedelta] = datetime.timedelta(days=7),
        **kwargs
    ):
        super(JobPropertiesConstraints, self).__init__(**kwargs)
        self.max_wall_clock_time = max_wall_clock_time
2009
2010
class JobPropertiesExecutionInfo(msrest.serialization.Model):
    """Information about the execution of a job.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar start_time: The time at which the job started running. 'Running' corresponds to the
     running state. If the job has been restarted or retried, this is the most recent time at which
     the job started running. This property is present only for job that are in the running or
     completed state.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: The time at which the job completed. This property is only returned if the job
     is in completed state.
    :vartype end_time: ~datetime.datetime
    :ivar exit_code: The exit code of the job. This property is only returned if the job is in
     completed state.
    :vartype exit_code: int
    :ivar errors: A collection of errors encountered by the service during job execution.
    :vartype errors: list[~batch_ai.models.BatchAIError]
    """

    _validation = {
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'exit_code': {'readonly': True},
        'errors': {'readonly': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'exit_code': {'key': 'exitCode', 'type': 'int'},
        'errors': {'key': 'errors', 'type': '[BatchAIError]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        # All attributes are read-only and populated by the service on
        # deserialization; initialize each one to None.
        super().__init__(**kwargs)
        self.errors = None
        self.exit_code = None
        self.end_time = None
        self.start_time = None
2054
2055
class JobsListByExperimentOptions(msrest.serialization.Model):
    """Parameter group for the list-by-experiment operation.

    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     files can be returned.
    :type max_results: int
    """

    _validation = {
        'max_results': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'max_results': {'key': 'maxResults', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        max_results: Optional[int] = 1000,
        **kwargs
    ):
        # Page size defaults to the service maximum of 1000 items.
        super().__init__(**kwargs)
        self.max_results = max_results
2080
2081
class JobsListOutputFilesOptions(msrest.serialization.Model):
    """Parameter group for the list-output-files operation.

    All required parameters must be populated in order to send to Azure.

    :param outputdirectoryid: Required. Id of the job output directory. This is the
     OutputDirectory-->id parameter that is given by the user during Create Job.
    :type outputdirectoryid: str
    :param directory: The path to the directory.
    :type directory: str
    :param linkexpiryinminutes: The number of minutes after which the download link will expire.
    :type linkexpiryinminutes: int
    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     files can be returned.
    :type max_results: int
    """

    _validation = {
        'outputdirectoryid': {'required': True},
        'linkexpiryinminutes': {'maximum': 600, 'minimum': 5},
        'max_results': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'outputdirectoryid': {'key': 'outputdirectoryid', 'type': 'str'},
        'directory': {'key': 'directory', 'type': 'str'},
        'linkexpiryinminutes': {'key': 'linkexpiryinminutes', 'type': 'int'},
        'max_results': {'key': 'maxResults', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        outputdirectoryid: str,
        directory: Optional[str] = ".",
        linkexpiryinminutes: Optional[int] = 60,
        max_results: Optional[int] = 1000,
        **kwargs
    ):
        # Defaults: root of the output directory, one-hour link expiry, and
        # the maximum page size of 1000 files.
        super().__init__(**kwargs)
        self.outputdirectoryid = outputdirectoryid
        self.max_results = max_results
        self.linkexpiryinminutes = linkexpiryinminutes
        self.directory = directory
2126
2127
class KeyVaultSecretReference(msrest.serialization.Model):
    """Reference to a secret stored in an Azure Key Vault.

    All required parameters must be populated in order to send to Azure.

    :param source_vault: Required. Fully qualified resource identifier of the Key Vault.
    :type source_vault: ~batch_ai.models.ResourceId
    :param secret_url: Required. The URL referencing a secret in the Key Vault.
    :type secret_url: str
    """

    _validation = {
        'source_vault': {'required': True},
        'secret_url': {'required': True},
    }

    _attribute_map = {
        'source_vault': {'key': 'sourceVault', 'type': 'ResourceId'},
        'secret_url': {'key': 'secretUrl', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        source_vault: "ResourceId",
        secret_url: str,
        **kwargs
    ):
        # Both the vault resource ID and the secret URL are mandatory.
        super().__init__(**kwargs)
        self.secret_url = secret_url
        self.source_vault = source_vault
2159
2160
class ListUsagesResult(msrest.serialization.Model):
    """Response of the List Usages operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The list of compute resource usages.
    :vartype value: list[~batch_ai.models.Usage]
    :ivar next_link: The URI to fetch the next page of compute resource usage information. Call
     ListNext() with this to fetch the next page of compute resource usage information.
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Usage]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        # Both fields are read-only and filled in by the service; None until a
        # response has been deserialized.
        super().__init__(**kwargs)
        self.next_link = None
        self.value = None
2190
2191
class ManualScaleSettings(msrest.serialization.Model):
    """Settings for manually scaling the cluster to a fixed node count.

    All required parameters must be populated in order to send to Azure.

    :param target_node_count: Required. The desired number of compute nodes in the Cluster. Default
     is 0.
    :type target_node_count: int
    :param node_deallocation_option: An action to be performed when the cluster size is decreasing.
     The default value is requeue. Possible values include: "requeue", "terminate",
     "waitforjobcompletion". Default value: "requeue".
    :type node_deallocation_option: str or ~batch_ai.models.DeallocationOption
    """

    _validation = {
        'target_node_count': {'required': True},
    }

    _attribute_map = {
        'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
        'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'str'},
    }

    def __init__(self, *, target_node_count: int,
                 node_deallocation_option: Optional[Union[str, "DeallocationOption"]] = "requeue",
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.target_node_count = target_node_count
        self.node_deallocation_option = node_deallocation_option
2225
2226
class MountSettings(msrest.serialization.Model):
    """Mount information for a File Server.

    :param mount_point: Path where the data disks are mounted on the File Server.
    :type mount_point: str
    :param file_server_public_ip: Public IP address of the File Server which can be used to SSH to
     the node from outside of the subnet.
    :type file_server_public_ip: str
    :param file_server_internal_ip: Internal IP address of the File Server which can be used to
     access the File Server from within the subnet.
    :type file_server_internal_ip: str
    """

    _attribute_map = {
        'mount_point': {'key': 'mountPoint', 'type': 'str'},
        'file_server_public_ip': {'key': 'fileServerPublicIP', 'type': 'str'},
        'file_server_internal_ip': {'key': 'fileServerInternalIP', 'type': 'str'},
    }

    def __init__(self, *, mount_point: Optional[str] = None,
                 file_server_public_ip: Optional[str] = None,
                 file_server_internal_ip: Optional[str] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.mount_point = mount_point
        self.file_server_public_ip = file_server_public_ip
        self.file_server_internal_ip = file_server_internal_ip
2258
2259
class MountVolumes(msrest.serialization.Model):
    """Describes the volumes that should be mounted on the cluster nodes.

    :param azure_file_shares: A collection of Azure File Shares that are to be mounted to the
     cluster nodes.
    :type azure_file_shares: list[~batch_ai.models.AzureFileShareReference]
    :param azure_blob_file_systems: A collection of Azure Blob Containers that are to be mounted to
     the cluster nodes.
    :type azure_blob_file_systems: list[~batch_ai.models.AzureBlobFileSystemReference]
    :param file_servers: A collection of Batch AI File Servers that are to be mounted to the
     cluster nodes.
    :type file_servers: list[~batch_ai.models.FileServerReference]
    :param unmanaged_file_systems: A collection of unmanaged file systems that are to be mounted to
     the cluster nodes.
    :type unmanaged_file_systems: list[~batch_ai.models.UnmanagedFileSystemReference]
    """

    _attribute_map = {
        'azure_file_shares': {'key': 'azureFileShares', 'type': '[AzureFileShareReference]'},
        'azure_blob_file_systems': {'key': 'azureBlobFileSystems', 'type': '[AzureBlobFileSystemReference]'},
        'file_servers': {'key': 'fileServers', 'type': '[FileServerReference]'},
        'unmanaged_file_systems': {'key': 'unmanagedFileSystems', 'type': '[UnmanagedFileSystemReference]'},
    }

    def __init__(self, *, azure_file_shares: Optional[List["AzureFileShareReference"]] = None,
                 azure_blob_file_systems: Optional[List["AzureBlobFileSystemReference"]] = None,
                 file_servers: Optional[List["FileServerReference"]] = None,
                 unmanaged_file_systems: Optional[List["UnmanagedFileSystemReference"]] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # All four collections are optional; an omitted one simply mounts nothing of that kind.
        self.azure_file_shares = azure_file_shares
        self.azure_blob_file_systems = azure_blob_file_systems
        self.file_servers = file_servers
        self.unmanaged_file_systems = unmanaged_file_systems
2298
2299
class NameValuePair(msrest.serialization.Model):
    """A simple name-value pair.

    :param name: The name in the name-value pair.
    :type name: str
    :param value: The value in the name-value pair.
    :type value: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, *, name: Optional[str] = None, value: Optional[str] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.name = name
        self.value = value
2324
2325
class NodeSetup(msrest.serialization.Model):
    """Settings that customize the compute nodes when they are set up.

    :param setup_task: Setup task to run on cluster nodes when nodes got created or rebooted. The
     setup task code needs to be idempotent. Generally the setup task is used to download static
     data that is required for all jobs that run on the cluster VMs and/or to download/install
     software.
    :type setup_task: ~batch_ai.models.SetupTask
    :param mount_volumes: Mount volumes to be available to setup task and all jobs executing on the
     cluster. The volumes will be mounted at location specified by $AZ_BATCHAI_MOUNT_ROOT
     environment variable.
    :type mount_volumes: ~batch_ai.models.MountVolumes
    :param performance_counters_settings: Settings for performance counters collecting and
     uploading.
    :type performance_counters_settings: ~batch_ai.models.PerformanceCountersSettings
    """

    _attribute_map = {
        'setup_task': {'key': 'setupTask', 'type': 'SetupTask'},
        'mount_volumes': {'key': 'mountVolumes', 'type': 'MountVolumes'},
        'performance_counters_settings': {'key': 'performanceCountersSettings', 'type': 'PerformanceCountersSettings'},
    }

    def __init__(self, *, setup_task: Optional["SetupTask"] = None,
                 mount_volumes: Optional["MountVolumes"] = None,
                 performance_counters_settings: Optional["PerformanceCountersSettings"] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.setup_task = setup_task
        self.mount_volumes = mount_volumes
        self.performance_counters_settings = performance_counters_settings
2361
2362
class NodeStateCounts(msrest.serialization.Model):
    """Per-state counts of the compute nodes in the cluster.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar idle_node_count: Number of compute nodes in idle state.
    :vartype idle_node_count: int
    :ivar running_node_count: Number of compute nodes which are running jobs.
    :vartype running_node_count: int
    :ivar preparing_node_count: Number of compute nodes which are being prepared.
    :vartype preparing_node_count: int
    :ivar unusable_node_count: Number of compute nodes which are in unusable state.
    :vartype unusable_node_count: int
    :ivar leaving_node_count: Number of compute nodes which are leaving the cluster.
    :vartype leaving_node_count: int
    """

    # Every counter is reported by the service and read-only for clients.
    _validation = {
        'idle_node_count': {'readonly': True},
        'running_node_count': {'readonly': True},
        'preparing_node_count': {'readonly': True},
        'unusable_node_count': {'readonly': True},
        'leaving_node_count': {'readonly': True},
    }

    _attribute_map = {
        'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
        'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
        'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
        'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
        'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start as None and are filled during deserialization.
        self.idle_node_count = None
        self.running_node_count = None
        self.preparing_node_count = None
        self.unusable_node_count = None
        self.leaving_node_count = None
2406
2407
class Operation(msrest.serialization.Model):
    """A single REST API operation exposed by the provider.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: This is of the format {provider}/{resource}/{operation}.
    :vartype name: str
    :param display: The object that describes the operation.
    :type display: ~batch_ai.models.OperationDisplay
    :ivar origin: The intended executor of the operation.
    :vartype origin: str
    :param properties: Any object.
    :type properties: any
    """

    _validation = {
        'name': {'readonly': True},
        'origin': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
        'origin': {'key': 'origin', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'object'},
    }

    def __init__(self, *, display: Optional["OperationDisplay"] = None,
                 properties: Optional[Any] = None, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # name/origin are server-reported; only display/properties are client-settable.
        self.name = None
        self.display = display
        self.origin = None
        self.properties = properties
2447
2448
class OperationDisplay(msrest.serialization.Model):
    """Display metadata describing a REST API operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar provider: Friendly name of the resource provider.
    :vartype provider: str
    :ivar operation: For example: read, write, delete, or listKeys/action.
    :vartype operation: str
    :ivar resource: The resource type on which the operation is performed.
    :vartype resource: str
    :ivar description: The friendly name of the operation.
    :vartype description: str
    """

    # All display fields are supplied by the service.
    _validation = {
        'provider': {'readonly': True},
        'operation': {'readonly': True},
        'resource': {'readonly': True},
        'description': {'readonly': True},
    }

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.provider = None
        self.operation = None
        self.resource = None
        self.description = None
2487
2488
class OperationListResult(msrest.serialization.Model):
    """Holds the list of all operations supported by the BatchAI resource provider.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The list of operations supported by the resource provider.
    :vartype value: list[~batch_ai.models.Operation]
    :ivar next_link: The URL to get the next set of operation list results if there are any.
    :vartype next_link: str
    """

    # Server-populated page of results; clients never send these fields.
    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Operation]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.value = None
        self.next_link = None
2517
2518
class OutputDirectory(msrest.serialization.Model):
    """An output directory to be created for the job.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. The ID of the output directory. The job can use AZ_BATCHAI\ *OUTPUT*\
     :code:`<id>` environment variable to find the directory path, where :code:`<id>` is the value
     of id attribute.
    :type id: str
    :param path_prefix: Required. The prefix path where the output directory will be created. Note,
     this is an absolute path to prefix. E.g. $AZ_BATCHAI_MOUNT_ROOT/MyNFS/MyLogs. The full path to
     the output directory by combining pathPrefix, jobOutputDirectoryPathSegment (reported by get
     job) and pathSuffix.
    :type path_prefix: str
    :param path_suffix: The suffix path where the output directory will be created. E.g. models.
     You can find the full path to the output directory by combining pathPrefix,
     jobOutputDirectoryPathSegment (reported by get job) and pathSuffix.
    :type path_suffix: str
    """

    _validation = {
        'id': {'required': True},
        'path_prefix': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'path_prefix': {'key': 'pathPrefix', 'type': 'str'},
        'path_suffix': {'key': 'pathSuffix', 'type': 'str'},
    }

    # NOTE: `id` shadows the builtin, but the name is part of the public wire
    # contract and must be kept.
    def __init__(self, *, id: str, path_prefix: str, path_suffix: Optional[str] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.id = id
        self.path_prefix = path_prefix
        self.path_suffix = path_suffix
2562
2563
class PerformanceCountersSettings(msrest.serialization.Model):
    """Settings that control performance counters reporting.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_reference: Required. Azure Application Insights information for performance
     counters reporting. If provided, Batch AI will upload node performance counters to the
     corresponding Azure Application Insights account.
    :type app_insights_reference: ~batch_ai.models.AppInsightsReference
    """

    _validation = {
        'app_insights_reference': {'required': True},
    }

    _attribute_map = {
        'app_insights_reference': {'key': 'appInsightsReference', 'type': 'AppInsightsReference'},
    }

    def __init__(self, *, app_insights_reference: "AppInsightsReference", **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.app_insights_reference = app_insights_reference
2591
2592
class PrivateRegistryCredentials(msrest.serialization.Model):
    """Credentials for pulling a container image from a private repository.

    All required parameters must be populated in order to send to Azure.

    :param username: Required. User name to login to the repository.
    :type username: str
    :param password: User password to login to the docker repository. One of password or
     passwordSecretReference must be specified.
    :type password: str
    :param password_secret_reference: KeyVault Secret storing the password. Users can store their
     secrets in Azure KeyVault and pass it to the Batch AI service to integrate with KeyVault. One
     of password or passwordSecretReference must be specified.
    :type password_secret_reference: ~batch_ai.models.KeyVaultSecretReference
    """

    _validation = {
        'username': {'required': True},
    }

    _attribute_map = {
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'password_secret_reference': {'key': 'passwordSecretReference', 'type': 'KeyVaultSecretReference'},
    }

    def __init__(self, *, username: str, password: Optional[str] = None,
                 password_secret_reference: Optional["KeyVaultSecretReference"] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.username = username
        # Exactly one of the two password mechanisms is expected by the service.
        self.password = password
        self.password_secret_reference = password_secret_reference
2631
2632
class PyTorchSettings(msrest.serialization.Model):
    """Settings for a pyTorch job.

    All required parameters must be populated in order to send to Azure.

    :param python_script_file_path: Required. The python script to execute.
    :type python_script_file_path: str
    :param python_interpreter_path: The path to the Python interpreter.
    :type python_interpreter_path: str
    :param command_line_args: Command line arguments that need to be passed to the python script.
    :type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to nodeCount property.
    :type process_count: int
    :param communication_backend: Type of the communication backend for distributed jobs. Valid
     values are 'TCP', 'Gloo' or 'MPI'. Not required for non-distributed jobs.
    :type communication_backend: str
    """

    _validation = {
        'python_script_file_path': {'required': True},
    }

    _attribute_map = {
        'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
        'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
        'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
        'process_count': {'key': 'processCount', 'type': 'int'},
        'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
    }

    def __init__(self, *, python_script_file_path: str,
                 python_interpreter_path: Optional[str] = None,
                 command_line_args: Optional[str] = None,
                 process_count: Optional[int] = None,
                 communication_backend: Optional[str] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.python_script_file_path = python_script_file_path
        self.python_interpreter_path = python_interpreter_path
        self.command_line_args = command_line_args
        self.process_count = process_count
        self.communication_backend = communication_backend
2680
2681
class RemoteLoginInformation(msrest.serialization.Model):
    """Details needed to SSH to a compute node in the cluster.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar node_id: ID of the compute node.
    :vartype node_id: str
    :ivar ip_address: Public IP address of the compute node.
    :vartype ip_address: str
    :ivar port: SSH port number of the node.
    :vartype port: int
    """

    # All login details are supplied by the service.
    _validation = {
        'node_id': {'readonly': True},
        'ip_address': {'readonly': True},
        'port': {'readonly': True},
    }

    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'ip_address': {'key': 'ipAddress', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.node_id = None
        self.ip_address = None
        self.port = None
2715
2716
class RemoteLoginInformationListResult(msrest.serialization.Model):
    """Page of values returned by the List operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The collection of returned remote login details.
    :vartype value: list[~batch_ai.models.RemoteLoginInformation]
    :ivar next_link: The continuation token.
    :vartype next_link: str
    """

    # Server-populated page of results; clients never send these fields.
    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[RemoteLoginInformation]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.value = None
        self.next_link = None
2745
2746
class Resource(msrest.serialization.Model):
    """Common definition shared by Azure resources.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :ivar location: The location of the resource.
    :vartype location: str
    :ivar tags: A set of tags. The tags of the resource.
    :vartype tags: dict[str, str]
    """

    # Every attribute is assigned by ARM; clients cannot set them directly.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'readonly': True},
        'tags': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start as None and are filled during deserialization.
        self.id = None
        self.name = None
        self.type = None
        self.location = None
        self.tags = None
2790
2791
class ResourceId(msrest.serialization.Model):
    """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. The ID of the resource.
    :type id: str
    """

    _validation = {
        'id': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    # NOTE: `id` shadows the builtin, but the name is part of the public wire
    # contract and must be kept.
    def __init__(self, *, id: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.id = id
2817
2818
class ScaleSettings(msrest.serialization.Model):
    """At least one of manual or autoScale settings must be specified. Only one of manual or autoScale settings can be specified. If autoScale settings are specified, the system automatically scales the cluster up and down (within the supplied limits) based on the pending jobs on the cluster.

    :param manual: Manual scale settings for the cluster.
    :type manual: ~batch_ai.models.ManualScaleSettings
    :param auto_scale: Auto-scale settings for the cluster.
    :type auto_scale: ~batch_ai.models.AutoScaleSettings
    """

    _attribute_map = {
        'manual': {'key': 'manual', 'type': 'ManualScaleSettings'},
        'auto_scale': {'key': 'autoScale', 'type': 'AutoScaleSettings'},
    }

    def __init__(self, *, manual: Optional["ManualScaleSettings"] = None,
                 auto_scale: Optional["AutoScaleSettings"] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Exactly one of the two settings is expected by the service.
        self.manual = manual
        self.auto_scale = auto_scale
2843
2844
class SetupTask(msrest.serialization.Model):
    """A setup task used to customize the compute nodes of the cluster.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param command_line: Required. The command line to be executed on each cluster's node after it
     being allocated or rebooted. The command is executed in a bash subshell as a root.
    :type command_line: str
    :param environment_variables: A collection of user defined environment variables to be set for
     setup task.
    :type environment_variables: list[~batch_ai.models.EnvironmentVariable]
    :param secrets: A collection of user defined environment variables with secret values to be set
     for the setup task. Server will never report values of these variables back.
    :type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
    :param std_out_err_path_prefix: Required. The prefix of a path where the Batch AI service will
     upload the stdout, stderr and execution log of the setup task.
    :type std_out_err_path_prefix: str
    :ivar std_out_err_path_suffix: A path segment appended by Batch AI to stdOutErrPathPrefix to
     form a path where stdout, stderr and execution log of the setup task will be uploaded. Batch AI
     creates the setup task output directories under an unique path to avoid conflicts between
     different clusters. The full path can be obtained by concatenation of stdOutErrPathPrefix and
     stdOutErrPathSuffix.
    :vartype std_out_err_path_suffix: str
    """

    _validation = {
        'command_line': {'required': True},
        'std_out_err_path_prefix': {'required': True},
        'std_out_err_path_suffix': {'readonly': True},
    }

    _attribute_map = {
        'command_line': {'key': 'commandLine', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '[EnvironmentVariable]'},
        'secrets': {'key': 'secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
        'std_out_err_path_prefix': {'key': 'stdOutErrPathPrefix', 'type': 'str'},
        'std_out_err_path_suffix': {'key': 'stdOutErrPathSuffix', 'type': 'str'},
    }

    def __init__(self, *, command_line: str, std_out_err_path_prefix: str,
                 environment_variables: Optional[List["EnvironmentVariable"]] = None,
                 secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.command_line = command_line
        self.environment_variables = environment_variables
        self.secrets = secrets
        self.std_out_err_path_prefix = std_out_err_path_prefix
        # Read-only: the service computes the suffix and reports it back.
        self.std_out_err_path_suffix = None
2901
2902
class SshConfiguration(msrest.serialization.Model):
    """Configuration for SSH access to the nodes.

    All required parameters must be populated in order to send to Azure.

    :param public_ips_to_allow: List of source IP ranges to allow SSH connection from. The default
     value is '*' (all source IPs are allowed). Maximum number of IP ranges that can be specified is
     400.
    :type public_ips_to_allow: list[str]
    :param user_account_settings: Required. Settings for administrator user account to be created
     on a node. The account can be used to establish SSH connection to the node.
    :type user_account_settings: ~batch_ai.models.UserAccountSettings
    """

    _validation = {
        'user_account_settings': {'required': True},
    }

    _attribute_map = {
        'public_ips_to_allow': {'key': 'publicIPsToAllow', 'type': '[str]'},
        'user_account_settings': {'key': 'userAccountSettings', 'type': 'UserAccountSettings'},
    }

    def __init__(self, *, user_account_settings: "UserAccountSettings",
                 public_ips_to_allow: Optional[List[str]] = None,
                 **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.public_ips_to_allow = public_ips_to_allow
        self.user_account_settings = user_account_settings
2936
2937
class TensorFlowSettings(msrest.serialization.Model):
    """Settings for running a TensorFlow job.

    All required parameters must be populated in order to send to Azure.

    :param python_script_file_path: Required. The python script to execute.
    :type python_script_file_path: str
    :param python_interpreter_path: The path to the Python interpreter.
    :type python_interpreter_path: str
    :param master_command_line_args: Command line arguments passed to the python script for the
     master task.
    :type master_command_line_args: str
    :param worker_command_line_args: Command line arguments passed to the python script for the
     worker task. Optional for single process jobs.
    :type worker_command_line_args: str
    :param parameter_server_command_line_args: Command line arguments passed to the python script
     for the parameter server. Optional for single process jobs.
    :type parameter_server_command_line_args: str
    :param worker_count: The number of worker tasks. If specified, the value must be less than or
     equal to (nodeCount * numberOfGPUs per VM); if omitted it defaults to nodeCount. Only valid
     for distributed TensorFlow training.
    :type worker_count: int
    :param parameter_server_count: The number of parameter server tasks. If specified, the value
     must be less than or equal to nodeCount; if omitted it defaults to 1 for distributed
     TensorFlow training. Only valid for distributed TensorFlow training.
    :type parameter_server_count: int
    """

    # Only the script path is mandatory; all other fields are optional.
    _validation = {
        "python_script_file_path": {"required": True},
    }

    _attribute_map = {
        "python_script_file_path": {"key": "pythonScriptFilePath", "type": "str"},
        "python_interpreter_path": {"key": "pythonInterpreterPath", "type": "str"},
        "master_command_line_args": {"key": "masterCommandLineArgs", "type": "str"},
        "worker_command_line_args": {"key": "workerCommandLineArgs", "type": "str"},
        "parameter_server_command_line_args": {"key": "parameterServerCommandLineArgs", "type": "str"},
        "worker_count": {"key": "workerCount", "type": "int"},
        "parameter_server_count": {"key": "parameterServerCount", "type": "int"},
    }

    def __init__(
        self,
        *,
        python_script_file_path: str,
        python_interpreter_path: Optional[str] = None,
        master_command_line_args: Optional[str] = None,
        worker_command_line_args: Optional[str] = None,
        parameter_server_command_line_args: Optional[str] = None,
        worker_count: Optional[int] = None,
        parameter_server_count: Optional[int] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.parameter_server_count = parameter_server_count
        self.worker_count = worker_count
        self.parameter_server_command_line_args = parameter_server_command_line_args
        self.worker_command_line_args = worker_command_line_args
        self.master_command_line_args = master_command_line_args
        self.python_interpreter_path = python_interpreter_path
        self.python_script_file_path = python_script_file_path
3001
3002
class UnmanagedFileSystemReference(msrest.serialization.Model):
    """Mounting configuration for an unmanaged file system.

    All required parameters must be populated in order to send to Azure.

    :param mount_command: Required. Mount command line. Note, Batch AI will append the mount path
     to the command on its own.
    :type mount_command: str
    :param relative_mount_path: Required. The relative path on the compute node where the unmanaged
     file system will be mounted. Cluster level unmanaged file systems are mounted under
     $AZ_BATCHAI_MOUNT_ROOT; job level ones under $AZ_BATCHAI_JOB_MOUNT_ROOT.
    :type relative_mount_path: str
    """

    # Both fields are mandatory for a usable mount.
    _validation = {
        "mount_command": {"required": True},
        "relative_mount_path": {"required": True},
    }

    _attribute_map = {
        "mount_command": {"key": "mountCommand", "type": "str"},
        "relative_mount_path": {"key": "relativeMountPath", "type": "str"},
    }

    def __init__(self, *, mount_command: str, relative_mount_path: str, **kwargs):
        super().__init__(**kwargs)
        self.relative_mount_path = relative_mount_path
        self.mount_command = mount_command
3038
3039
class Usage(msrest.serialization.Model):
    """Describes Batch AI resource usage.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
    :vartype unit: str or ~batch_ai.models.UsageUnit
    :ivar current_value: The current usage of the resource.
    :vartype current_value: int
    :ivar limit: The maximum permitted usage of the resource.
    :vartype limit: long
    :ivar name: The name of the type of usage.
    :vartype name: ~batch_ai.models.UsageName
    """

    # Usage data is reported by the service; every field is read-only.
    _validation = {
        "unit": {"readonly": True},
        "current_value": {"readonly": True},
        "limit": {"readonly": True},
        "name": {"readonly": True},
    }

    _attribute_map = {
        "unit": {"key": "unit", "type": "str"},
        "current_value": {"key": "currentValue", "type": "int"},
        "limit": {"key": "limit", "type": "long"},
        "name": {"key": "name", "type": "UsageName"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated fields start unset on the client side.
        self.unit = self.current_value = self.limit = self.name = None
3078
3079
class UsageName(msrest.serialization.Model):
    """The usage names.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The name of the resource.
    :vartype value: str
    :ivar localized_value: The localized name of the resource.
    :vartype localized_value: str
    """

    # Both fields are read-only and filled in by the service.
    _validation = {
        "value": {"readonly": True},
        "localized_value": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
        "localized_value": {"key": "localizedValue", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated fields start unset on the client side.
        self.value = self.localized_value = None
3108
3109
class UserAccountSettings(msrest.serialization.Model):
    """Settings for the user account created on each node of a cluster.

    All required parameters must be populated in order to send to Azure.

    :param admin_user_name: Required. Name of the administrator user account which can be used to
     SSH to nodes.
    :type admin_user_name: str
    :param admin_user_ssh_public_key: SSH public key of the administrator user account.
    :type admin_user_ssh_public_key: str
    :param admin_user_password: Password of the administrator user account.
    :type admin_user_password: str
    """

    # Only the account name is mandatory; key and password are optional credentials.
    _validation = {
        "admin_user_name": {"required": True},
    }

    _attribute_map = {
        "admin_user_name": {"key": "adminUserName", "type": "str"},
        "admin_user_ssh_public_key": {"key": "adminUserSshPublicKey", "type": "str"},
        "admin_user_password": {"key": "adminUserPassword", "type": "str"},
    }

    def __init__(
        self,
        *,
        admin_user_name: str,
        admin_user_ssh_public_key: Optional[str] = None,
        admin_user_password: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.admin_user_password = admin_user_password
        self.admin_user_ssh_public_key = admin_user_ssh_public_key
        self.admin_user_name = admin_user_name
3146
3147
class VirtualMachineConfiguration(msrest.serialization.Model):
    """VM configuration for cluster nodes.

    :param image_reference: OS image reference for cluster nodes.
    :type image_reference: ~batch_ai.models.ImageReference
    """

    _attribute_map = {
        "image_reference": {"key": "imageReference", "type": "ImageReference"},
    }

    def __init__(self, *, image_reference: Optional["ImageReference"] = None, **kwargs):
        super().__init__(**kwargs)
        self.image_reference = image_reference
3167
3168
class Workspace(Resource):
    """Batch AI Workspace information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :ivar location: The location of the resource.
    :vartype location: str
    :ivar tags: A set of tags. The tags of the resource.
    :vartype tags: dict[str, str]
    :ivar creation_time: Time when the Workspace was created.
    :vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: The provisioned state of the Workspace. Possible values include:
     "creating", "succeeded", "failed", "deleting".
    :vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
    :ivar provisioning_state_transition_time: The time at which the workspace entered its current
     provisioning state.
    :vartype provisioning_state_transition_time: ~datetime.datetime
    """

    # Everything on a workspace is server-assigned; nothing is client-writable here.
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"readonly": True},
        "tags": {"readonly": True},
        "creation_time": {"readonly": True},
        "provisioning_state": {"readonly": True},
        "provisioning_state_transition_time": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "creation_time": {"key": "properties.creationTime", "type": "iso-8601"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
        "provisioning_state_transition_time": {"key": "properties.provisioningStateTransitionTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Base Resource fields are set by the parent; workspace-specific
        # server-populated fields start unset on the client side.
        self.creation_time = None
        self.provisioning_state = None
        self.provisioning_state_transition_time = None
3224
3225
class WorkspaceCreateParameters(msrest.serialization.Model):
    """Parameters for creating a Workspace.

    All required parameters must be populated in order to send to Azure.

    :param location: Required. The region in which to create the Workspace.
    :type location: str
    :param tags: A set of tags. The user specified tags associated with the Workspace.
    :type tags: dict[str, str]
    """

    _validation = {
        "location": {"required": True},
    }

    _attribute_map = {
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs):
        super().__init__(**kwargs)
        self.tags = tags
        self.location = location
3256
3257
class WorkspaceListResult(msrest.serialization.Model):
    """Values returned by the List operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The collection of workspaces.
    :vartype value: list[~batch_ai.models.Workspace]
    :ivar next_link: The continuation token.
    :vartype next_link: str
    """

    # Paged list results are read-only server responses.
    _validation = {
        "value": {"readonly": True},
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[Workspace]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated fields start unset on the client side.
        self.value = self.next_link = None
3286
3287
class WorkspacesListByResourceGroupOptions(msrest.serialization.Model):
    """Parameter group.

    :param max_results: The maximum number of items to return in the response. At most 1000
     workspaces can be returned.
    :type max_results: int
    """

    # The service rejects page sizes outside [1, 1000].
    _validation = {
        "max_results": {"maximum": 1000, "minimum": 1},
    }

    _attribute_map = {
        "max_results": {"key": "maxResults", "type": "int"},
    }

    def __init__(self, *, max_results: Optional[int] = 1000, **kwargs):
        super().__init__(**kwargs)
        self.max_results = max_results
3312
3313
class WorkspacesListOptions(msrest.serialization.Model):
    """Parameter group.

    :param max_results: The maximum number of items to return in the response. At most 1000
     workspaces can be returned.
    :type max_results: int
    """

    # The service rejects page sizes outside [1, 1000].
    _validation = {
        "max_results": {"maximum": 1000, "minimum": 1},
    }

    _attribute_map = {
        "max_results": {"key": "maxResults", "type": "int"},
    }

    def __init__(self, *, max_results: Optional[int] = 1000, **kwargs):
        super().__init__(**kwargs)
        self.max_results = max_results
3338
3339
class WorkspaceUpdateParameters(msrest.serialization.Model):
    """Parameters for updating a Workspace.

    :param tags: A set of tags. The user specified tags associated with the Workspace.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs):
        super().__init__(**kwargs)
        self.tags = tags
3359