diff --git a/scaleway-async/scaleway_async/datalab/v1beta1/api.py b/scaleway-async/scaleway_async/datalab/v1beta1/api.py index a04836837..650da58c8 100644 --- a/scaleway-async/scaleway_async/datalab/v1beta1/api.py +++ b/scaleway-async/scaleway_async/datalab/v1beta1/api.py @@ -50,7 +50,7 @@ class DatalabV1Beta1API(API): """ - This API allows you to manage your Data Lab resources. + This API allows you to manage your Apache Spark™ resources. """ async def create_datalab( @@ -69,15 +69,15 @@ async def create_datalab( total_storage: Optional[Volume] = None, ) -> Datalab: """ - Create a new Data Lab. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity. - :param name: The name of the Data Lab. - :param description: The description of the Data Lab. - :param has_notebook: Select this option to include a notebook as part of the Data Lab. - :param spark_version: The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions. - :param private_network_id: The unique identifier of the private network the Data Lab will be attached to. + Create a new cluster. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity. + :param name: The name of the cluster. + :param description: The description of the cluster. + :param has_notebook: Select this option to include a notebook as part of the cluster. + :param spark_version: The version of Apache Spark™ running inside the cluster, available options can be viewed at ListClusterVersions. + :param private_network_id: The unique identifier of the private network the cluster will be attached to. :param region: Region to target. If none is passed will use default region from the config. - :param project_id: The unique identifier of the project where the Data Lab will be created. - :param tags: The tags of the Data Lab. 
+ :param project_id: The unique identifier of the project where the cluster will be created. + :param tags: The tags of the cluster. :param main: The cluster main node specification. It holds the parameters `node_type` which specifies the node type of the main node. See ListNodeTypes for available options. See ListNodeTypes for available options. :param worker: The cluster worker node specification. It holds the parameters `node_type` which specifies the node type of the worker node and `node_count` for specifying the amount of nodes. :param total_storage: The maximum persistent volume storage that will be available during workload. @@ -130,8 +130,8 @@ async def get_datalab( region: Optional[ScwRegion] = None, ) -> Datalab: """ - Retrieve information about a given Data Lab cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. - :param datalab_id: The unique identifier of the Data Lab. + Retrieve information about a given cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -164,8 +164,8 @@ async def wait_for_datalab( options: Optional[WaitForOptions[Datalab, Union[bool, Awaitable[bool]]]] = None, ) -> Datalab: """ - Retrieve information about a given Data Lab cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. - :param datalab_id: The unique identifier of the Data Lab. + Retrieve information about a given cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. 
+ :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -205,12 +205,12 @@ async def list_datalabs( order_by: Optional[ListDatalabsRequestOrderBy] = None, ) -> ListDatalabsResponse: """ - List information about Data Lab cluster within a project or an organization. + List information about clusters within a project or an organization. :param region: Region to target. If none is passed will use default region from the config. - :param organization_id: The unique identifier of the organization whose Data Labs you want to list. - :param project_id: The unique identifier of the project whose Data Labs you want to list. - :param name: The name of the Data Lab you want to list. - :param tags: The tags associated with the Data Lab you want to list. + :param organization_id: The unique identifier of the organization whose clusters you want to list. + :param project_id: The unique identifier of the project whose clusters you want to list. + :param name: The name of the cluster you want to list. + :param tags: The tags associated with the cluster you want to list. :param page: The page number for pagination. :param page_size: The page size for pagination. :param order_by: The order by field, available options are `name_asc`, `name_desc`, `created_at_asc`, `created_at_desc`, `updated_at_asc`, `updated_at_desc`. @@ -257,12 +257,12 @@ async def list_datalabs_all( order_by: Optional[ListDatalabsRequestOrderBy] = None, ) -> list[Datalab]: """ - List information about Data Lab cluster within a project or an organization. + List information about clusters within a project or an organization. :param region: Region to target. If none is passed will use default region from the config. - :param organization_id: The unique identifier of the organization whose Data Labs you want to list. 
- :param project_id: The unique identifier of the project whose Data Labs you want to list. - :param name: The name of the Data Lab you want to list. - :param tags: The tags associated with the Data Lab you want to list. + :param organization_id: The unique identifier of the organization whose clusters you want to list. + :param project_id: The unique identifier of the project whose clusters you want to list. + :param name: The name of the cluster you want to list. + :param tags: The tags associated with the cluster you want to list. :param page: The page number for pagination. :param page_size: The page size for pagination. :param order_by: The order by field, available options are `name_asc`, `name_desc`, `created_at_asc`, `created_at_desc`, `updated_at_asc`, `updated_at_desc`. @@ -301,13 +301,13 @@ async def update_datalab( node_count: Optional[int] = None, ) -> Datalab: """ - Update a Data Labs node counts. Allows for up- and downscaling on demand, depending on the expected workload. - :param datalab_id: The unique identifier of the Data Lab. + Update a cluster's node counts. Allows for up- and downscaling on demand, depending on the expected workload. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. - :param name: The updated name of the Data Lab. - :param description: The updated description of the Data Lab. - :param tags: The updated tags of the Data Lab. - :param node_count: The updated node count of the Data Lab. Scale up or down the number of worker nodes. + :param name: The updated name of the cluster. + :param description: The updated description of the cluster. + :param tags: The updated tags of the cluster. + :param node_count: The updated node count of the cluster. Scale up or down the number of worker nodes. 
:return: :class:`Datalab ` Usage: @@ -349,8 +349,8 @@ async def delete_datalab( region: Optional[ScwRegion] = None, ) -> Datalab: """ - Delete a Data Lab based on its region and id. - :param datalab_id: The unique identifier of the Data Lab. + Delete a cluster based on its region and id. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -386,7 +386,7 @@ async def list_node_types( resource_type: Optional[ListNodeTypesRequestResourceType] = None, ) -> ListNodeTypesResponse: """ - List the available compute node types for creating a Data Lab. + List the available compute node types for creating a new cluster. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -431,7 +431,7 @@ async def list_node_types_all( resource_type: Optional[ListNodeTypesRequestResourceType] = None, ) -> list[NodeType]: """ - List the available compute node types for creating a Data Lab. + List the available compute node types for creating a new cluster. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -542,7 +542,7 @@ async def list_cluster_versions( order_by: Optional[ListClusterVersionsRequestOrderBy] = None, ) -> ListClusterVersionsResponse: """ - List the Spark versions the product is compatible with. + List the Apache Spark™ versions the product is compatible with. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -581,7 +581,7 @@ async def list_cluster_versions_all( order_by: Optional[ListClusterVersionsRequestOrderBy] = None, ) -> list[Cluster]: """ - List the Spark versions the product is compatible with. 
+ List the Apache Spark™ versions the product is compatible with. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. diff --git a/scaleway-async/scaleway_async/datalab/v1beta1/types.py b/scaleway-async/scaleway_async/datalab/v1beta1/types.py index 15f60c8ad..b3a066823 100644 --- a/scaleway-async/scaleway_async/datalab/v1beta1/types.py +++ b/scaleway-async/scaleway_async/datalab/v1beta1/types.py @@ -240,62 +240,62 @@ class Cluster: @dataclass class Datalab: """ - A Data Lab resource. + A Clusters for Apache Spark™ resource. """ id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ project_id: str """ - The unique identifier of the project where the Data Lab has been created. + The unique identifier of the project where the cluster has been created. """ name: str """ - The name of the Data Lab. + The name of the cluster. """ description: str """ - The description of the Data Lab. + The description of the cluster. """ tags: list[str] """ - The tags of the Data Lab. + The tags of the cluster. """ status: DatalabStatus """ - The status of the Data Lab. For a working Data Lab the status is marked as `ready`. + The status of the cluster. For a working cluster the status is marked as `ready`. """ region: ScwRegion """ - The region of the Data Lab. + The region of the cluster. """ has_notebook: bool """ - Whether a JupyterLab notebook is associated with the Data Lab or not. + Whether a JupyterLab notebook is associated with the cluster or not. """ spark_version: str """ - The version of Spark running inside the Data Lab. + The version of Apache Spark™ running inside the cluster. """ private_network_id: str """ - The unique identifier of the private network to which the Data Lab is attached to. + The unique identifier of the private network to which the cluster is attached to. 
""" main: Optional[DatalabSparkMain] = None """ - The Spark Main node specification of Data lab. It holds the parameters `node_type`, `spark_ui_url` (available to reach Spark UI), `spark_master_url` (used to reach the cluster within a VPC), `root_volume` (size of the volume assigned to the cluster). + The Apache Spark™ Main node specification of cluster. It holds the parameters `node_type`, `spark_ui_url` (available to reach Apache Spark™ UI), `spark_master_url` (used to reach the cluster within a VPC), `root_volume` (size of the volume assigned to the cluster). """ worker: Optional[DatalabSparkWorker] = None @@ -305,12 +305,12 @@ class Datalab: created_at: Optional[datetime] = None """ - The creation timestamp of the Data Lab. + The creation timestamp of the cluster. """ updated_at: Optional[datetime] = None """ - The last update date of the Data Lab. + The last update date of the cluster. """ notebook_url: Optional[str] = None @@ -320,7 +320,7 @@ class Datalab: total_storage: Optional[Volume] = None """ - The total persistent volume storage selected to run Spark. + The total persistent volume storage selected to run Apache Spark™. """ notebook_master_url: Optional[str] = None @@ -421,32 +421,32 @@ class Notebook: @dataclass class CreateDatalabRequest: """ - A request to create a Data Lab. + A request to create a cluster. """ name: str """ - The name of the Data Lab. + The name of the cluster. """ description: str """ - The description of the Data Lab. + The description of the cluster. """ has_notebook: bool """ - Select this option to include a notebook as part of the Data Lab. + Select this option to include a notebook as part of the cluster. """ spark_version: str """ - The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions. + The version of Apache Spark™ running inside the cluster, available options can be viewed at ListClusterVersions. 
""" private_network_id: str """ - The unique identifier of the private network the Data Lab will be attached to. + The unique identifier of the private network the cluster will be attached to. """ region: Optional[ScwRegion] = None @@ -456,12 +456,12 @@ class CreateDatalabRequest: project_id: Optional[str] = None """ - The unique identifier of the project where the Data Lab will be created. + The unique identifier of the project where the cluster will be created. """ tags: Optional[list[str]] = field(default_factory=list) """ - The tags of the Data Lab. + The tags of the cluster. """ main: Optional[CreateDatalabRequestSparkMain] = None @@ -483,12 +483,12 @@ class CreateDatalabRequest: @dataclass class DeleteDatalabRequest: """ - A request to delete a Data Lab. + A request to delete a cluster. """ datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -500,12 +500,12 @@ class DeleteDatalabRequest: @dataclass class GetDatalabRequest: """ - A request to get information about a Data Lab. + A request to get information about a cluster. """ datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -563,7 +563,7 @@ class ListClusterVersionsResponse: @dataclass class ListDatalabsRequest: """ - A request to list Data Labs. + A request to list clusters. """ region: Optional[ScwRegion] = None @@ -573,22 +573,22 @@ class ListDatalabsRequest: organization_id: Optional[str] = None """ - The unique identifier of the organization whose Data Labs you want to list. + The unique identifier of the organization whose clusters you want to list. """ project_id: Optional[str] = None """ - The unique identifier of the project whose Data Labs you want to list. + The unique identifier of the project whose clusters you want to list. """ name: Optional[str] = None """ - The name of the Data Lab you want to list. 
+ The name of the cluster you want to list. """ tags: Optional[list[str]] = field(default_factory=list) """ - The tags associated with the Data Lab you want to list. + The tags associated with the cluster you want to list. """ page: Optional[int] = 0 @@ -610,17 +610,17 @@ class ListDatalabsRequest: @dataclass class ListDatalabsResponse: """ - A response to list Data Labs. + A response to list clusters. """ datalabs: list[Datalab] """ - The list of Data Labs. This is a list composed of messages of type `DataLab`. + The list of clusters. This is a list composed of messages of type `DataLab`. """ total_count: int """ - The total count of Data Labs. + The total count of clusters. """ @@ -731,12 +731,12 @@ class ListNotebookVersionsResponse: @dataclass class UpdateDatalabRequest: """ - A request to update a Data Lab. + A request to update a cluster. """ datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -746,20 +746,20 @@ class UpdateDatalabRequest: name: Optional[str] = None """ - The updated name of the Data Lab. + The updated name of the cluster. """ description: Optional[str] = None """ - The updated description of the Data Lab. + The updated description of the cluster. """ tags: Optional[list[str]] = field(default_factory=list) """ - The updated tags of the Data Lab. + The updated tags of the cluster. """ node_count: Optional[int] = 0 """ - The updated node count of the Data Lab. Scale up or down the number of worker nodes. + The updated node count of the cluster. Scale up or down the number of worker nodes. """ diff --git a/scaleway/scaleway/datalab/v1beta1/api.py b/scaleway/scaleway/datalab/v1beta1/api.py index 5474ef610..a026b4e3f 100644 --- a/scaleway/scaleway/datalab/v1beta1/api.py +++ b/scaleway/scaleway/datalab/v1beta1/api.py @@ -50,7 +50,7 @@ class DatalabV1Beta1API(API): """ - This API allows you to manage your Data Lab resources. 
+ This API allows you to manage your Apache Spark™ resources. """ def create_datalab( @@ -69,15 +69,15 @@ def create_datalab( total_storage: Optional[Volume] = None, ) -> Datalab: """ - Create a new Data Lab. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity. - :param name: The name of the Data Lab. - :param description: The description of the Data Lab. - :param has_notebook: Select this option to include a notebook as part of the Data Lab. - :param spark_version: The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions. - :param private_network_id: The unique identifier of the private network the Data Lab will be attached to. + Create a new cluster. In this call, one can personalize the node counts, add a notebook, choose the private network, define the persistent volume storage capacity. + :param name: The name of the cluster. + :param description: The description of the cluster. + :param has_notebook: Select this option to include a notebook as part of the cluster. + :param spark_version: The version of Apache Spark™ running inside the cluster, available options can be viewed at ListClusterVersions. + :param private_network_id: The unique identifier of the private network the cluster will be attached to. :param region: Region to target. If none is passed will use default region from the config. - :param project_id: The unique identifier of the project where the Data Lab will be created. - :param tags: The tags of the Data Lab. + :param project_id: The unique identifier of the project where the cluster will be created. + :param tags: The tags of the cluster. :param main: The cluster main node specification. It holds the parameters `node_type` which specifies the node type of the main node. See ListNodeTypes for available options. See ListNodeTypes for available options. :param worker: The cluster worker node specification. 
It holds the parameters `node_type` which specifies the node type of the worker node and `node_count` for specifying the amount of nodes. :param total_storage: The maximum persistent volume storage that will be available during workload. @@ -130,8 +130,8 @@ def get_datalab( region: Optional[ScwRegion] = None, ) -> Datalab: """ - Retrieve information about a given Data Lab cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. - :param datalab_id: The unique identifier of the Data Lab. + Retrieve information about a given cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -164,8 +164,8 @@ def wait_for_datalab( options: Optional[WaitForOptions[Datalab, bool]] = None, ) -> Datalab: """ - Retrieve information about a given Data Lab cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. - :param datalab_id: The unique identifier of the Data Lab. + Retrieve information about a given cluster, specified by the `region` and `datalab_id` parameters. Its full details, including name, status, node counts, are returned in the response object. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -205,12 +205,12 @@ def list_datalabs( order_by: Optional[ListDatalabsRequestOrderBy] = None, ) -> ListDatalabsResponse: """ - List information about Data Lab cluster within a project or an organization. + List information about clusters within a project or an organization. 
:param region: Region to target. If none is passed will use default region from the config. - :param organization_id: The unique identifier of the organization whose Data Labs you want to list. - :param project_id: The unique identifier of the project whose Data Labs you want to list. - :param name: The name of the Data Lab you want to list. - :param tags: The tags associated with the Data Lab you want to list. + :param organization_id: The unique identifier of the organization whose clusters you want to list. + :param project_id: The unique identifier of the project whose clusters you want to list. + :param name: The name of the cluster you want to list. + :param tags: The tags associated with the cluster you want to list. :param page: The page number for pagination. :param page_size: The page size for pagination. :param order_by: The order by field, available options are `name_asc`, `name_desc`, `created_at_asc`, `created_at_desc`, `updated_at_asc`, `updated_at_desc`. @@ -257,12 +257,12 @@ def list_datalabs_all( order_by: Optional[ListDatalabsRequestOrderBy] = None, ) -> list[Datalab]: """ - List information about Data Lab cluster within a project or an organization. + List information about clusters within a project or an organization. :param region: Region to target. If none is passed will use default region from the config. - :param organization_id: The unique identifier of the organization whose Data Labs you want to list. - :param project_id: The unique identifier of the project whose Data Labs you want to list. - :param name: The name of the Data Lab you want to list. - :param tags: The tags associated with the Data Lab you want to list. + :param organization_id: The unique identifier of the organization whose clusters you want to list. + :param project_id: The unique identifier of the project whose clusters you want to list. + :param name: The name of the cluster you want to list. + :param tags: The tags associated with the cluster you want to list. 
:param page: The page number for pagination. :param page_size: The page size for pagination. :param order_by: The order by field, available options are `name_asc`, `name_desc`, `created_at_asc`, `created_at_desc`, `updated_at_asc`, `updated_at_desc`. @@ -301,13 +301,13 @@ def update_datalab( node_count: Optional[int] = None, ) -> Datalab: """ - Update a Data Labs node counts. Allows for up- and downscaling on demand, depending on the expected workload. - :param datalab_id: The unique identifier of the Data Lab. + Update a cluster's node counts. Allows for up- and downscaling on demand, depending on the expected workload. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. - :param name: The updated name of the Data Lab. - :param description: The updated description of the Data Lab. - :param tags: The updated tags of the Data Lab. - :param node_count: The updated node count of the Data Lab. Scale up or down the number of worker nodes. + :param name: The updated name of the cluster. + :param description: The updated description of the cluster. + :param tags: The updated tags of the cluster. + :param node_count: The updated node count of the cluster. Scale up or down the number of worker nodes. :return: :class:`Datalab ` Usage: @@ -349,8 +349,8 @@ def delete_datalab( region: Optional[ScwRegion] = None, ) -> Datalab: """ - Delete a Data Lab based on its region and id. - :param datalab_id: The unique identifier of the Data Lab. + Delete a cluster based on its region and id. + :param datalab_id: The unique identifier of the cluster. :param region: Region to target. If none is passed will use default region from the config. :return: :class:`Datalab ` @@ -386,7 +386,7 @@ def list_node_types( resource_type: Optional[ListNodeTypesRequestResourceType] = None, ) -> ListNodeTypesResponse: """ - List the available compute node types for creating a Data Lab. 
+ List the available compute node types for creating a new cluster. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -431,7 +431,7 @@ def list_node_types_all( resource_type: Optional[ListNodeTypesRequestResourceType] = None, ) -> list[NodeType]: """ - List the available compute node types for creating a Data Lab. + List the available compute node types for creating a new cluster. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -542,7 +542,7 @@ def list_cluster_versions( order_by: Optional[ListClusterVersionsRequestOrderBy] = None, ) -> ListClusterVersionsResponse: """ - List the Spark versions the product is compatible with. + List the Apache Spark™ versions the product is compatible with. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. @@ -581,7 +581,7 @@ def list_cluster_versions_all( order_by: Optional[ListClusterVersionsRequestOrderBy] = None, ) -> list[Cluster]: """ - List the Spark versions the product is compatible with. + List the Apache Spark™ versions the product is compatible with. :param region: Region to target. If none is passed will use default region from the config. :param page: The page number. :param page_size: The page size. diff --git a/scaleway/scaleway/datalab/v1beta1/types.py b/scaleway/scaleway/datalab/v1beta1/types.py index 15f60c8ad..b3a066823 100644 --- a/scaleway/scaleway/datalab/v1beta1/types.py +++ b/scaleway/scaleway/datalab/v1beta1/types.py @@ -240,62 +240,62 @@ class Cluster: @dataclass class Datalab: """ - A Data Lab resource. + A Clusters for Apache Spark™ resource. """ id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. 
""" project_id: str """ - The unique identifier of the project where the Data Lab has been created. + The unique identifier of the project where the cluster has been created. """ name: str """ - The name of the Data Lab. + The name of the cluster. """ description: str """ - The description of the Data Lab. + The description of the cluster. """ tags: list[str] """ - The tags of the Data Lab. + The tags of the cluster. """ status: DatalabStatus """ - The status of the Data Lab. For a working Data Lab the status is marked as `ready`. + The status of the cluster. For a working cluster the status is marked as `ready`. """ region: ScwRegion """ - The region of the Data Lab. + The region of the cluster. """ has_notebook: bool """ - Whether a JupyterLab notebook is associated with the Data Lab or not. + Whether a JupyterLab notebook is associated with the cluster or not. """ spark_version: str """ - The version of Spark running inside the Data Lab. + The version of Apache Spark™ running inside the cluster. """ private_network_id: str """ - The unique identifier of the private network to which the Data Lab is attached to. + The unique identifier of the private network to which the cluster is attached to. """ main: Optional[DatalabSparkMain] = None """ - The Spark Main node specification of Data lab. It holds the parameters `node_type`, `spark_ui_url` (available to reach Spark UI), `spark_master_url` (used to reach the cluster within a VPC), `root_volume` (size of the volume assigned to the cluster). + The Apache Spark™ Main node specification of cluster. It holds the parameters `node_type`, `spark_ui_url` (available to reach Apache Spark™ UI), `spark_master_url` (used to reach the cluster within a VPC), `root_volume` (size of the volume assigned to the cluster). """ worker: Optional[DatalabSparkWorker] = None @@ -305,12 +305,12 @@ class Datalab: created_at: Optional[datetime] = None """ - The creation timestamp of the Data Lab. + The creation timestamp of the cluster. 
""" updated_at: Optional[datetime] = None """ - The last update date of the Data Lab. + The last update date of the cluster. """ notebook_url: Optional[str] = None @@ -320,7 +320,7 @@ class Datalab: total_storage: Optional[Volume] = None """ - The total persistent volume storage selected to run Spark. + The total persistent volume storage selected to run Apache Spark™. """ notebook_master_url: Optional[str] = None @@ -421,32 +421,32 @@ class Notebook: @dataclass class CreateDatalabRequest: """ - A request to create a Data Lab. + A request to create a cluster. """ name: str """ - The name of the Data Lab. + The name of the cluster. """ description: str """ - The description of the Data Lab. + The description of the cluster. """ has_notebook: bool """ - Select this option to include a notebook as part of the Data Lab. + Select this option to include a notebook as part of the cluster. """ spark_version: str """ - The version of Spark running inside the Data Lab, available options can be viewed at ListClusterVersions. + The version of Apache Spark™ running inside the cluster, available options can be viewed at ListClusterVersions. """ private_network_id: str """ - The unique identifier of the private network the Data Lab will be attached to. + The unique identifier of the private network the cluster will be attached to. """ region: Optional[ScwRegion] = None @@ -456,12 +456,12 @@ class CreateDatalabRequest: project_id: Optional[str] = None """ - The unique identifier of the project where the Data Lab will be created. + The unique identifier of the project where the cluster will be created. """ tags: Optional[list[str]] = field(default_factory=list) """ - The tags of the Data Lab. + The tags of the cluster. """ main: Optional[CreateDatalabRequestSparkMain] = None @@ -483,12 +483,12 @@ class CreateDatalabRequest: @dataclass class DeleteDatalabRequest: """ - A request to delete a Data Lab. + A request to delete a cluster. 
""" datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -500,12 +500,12 @@ class DeleteDatalabRequest: @dataclass class GetDatalabRequest: """ - A request to get information about a Data Lab. + A request to get information about a cluster. """ datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -563,7 +563,7 @@ class ListClusterVersionsResponse: @dataclass class ListDatalabsRequest: """ - A request to list Data Labs. + A request to list clusters. """ region: Optional[ScwRegion] = None @@ -573,22 +573,22 @@ class ListDatalabsRequest: organization_id: Optional[str] = None """ - The unique identifier of the organization whose Data Labs you want to list. + The unique identifier of the organization whose clusters you want to list. """ project_id: Optional[str] = None """ - The unique identifier of the project whose Data Labs you want to list. + The unique identifier of the project whose clusters you want to list. """ name: Optional[str] = None """ - The name of the Data Lab you want to list. + The name of the cluster you want to list. """ tags: Optional[list[str]] = field(default_factory=list) """ - The tags associated with the Data Lab you want to list. + The tags associated with the cluster you want to list. """ page: Optional[int] = 0 @@ -610,17 +610,17 @@ class ListDatalabsRequest: @dataclass class ListDatalabsResponse: """ - A response to list Data Labs. + A response to list clusters. """ datalabs: list[Datalab] """ - The list of Data Labs. This is a list composed of messages of type `DataLab`. + The list of clusters. This is a list composed of messages of type `DataLab`. """ total_count: int """ - The total count of Data Labs. + The total count of clusters. 
""" @@ -731,12 +731,12 @@ class ListNotebookVersionsResponse: @dataclass class UpdateDatalabRequest: """ - A request to update a Data Lab. + A request to update a cluster. """ datalab_id: str """ - The unique identifier of the Data Lab. + The unique identifier of the cluster. """ region: Optional[ScwRegion] = None @@ -746,20 +746,20 @@ class UpdateDatalabRequest: name: Optional[str] = None """ - The updated name of the Data Lab. + The updated name of the cluster. """ description: Optional[str] = None """ - The updated description of the Data Lab. + The updated description of the cluster. """ tags: Optional[list[str]] = field(default_factory=list) """ - The updated tags of the Data Lab. + The updated tags of the cluster. """ node_count: Optional[int] = 0 """ - The updated node count of the Data Lab. Scale up or down the number of worker nodes. + The updated node count of the cluster. Scale up or down the number of worker nodes. """