diff --git a/sdk/storage/azure-storage-file-datalake/MANIFEST.in b/sdk/storage/azure-storage-file-datalake/MANIFEST.in index 7bac0512bc31..0253d1882717 100644 --- a/sdk/storage/azure-storage-file-datalake/MANIFEST.in +++ b/sdk/storage/azure-storage-file-datalake/MANIFEST.in @@ -1,7 +1,8 @@ include *.md -include azure/__init__.py -include azure/storage/__init__.py include LICENSE +include azure/storage/filedatalake/_generated/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/storage/filedatalake/py.typed +include azure/__init__.py +include azure/storage/__init__.py +include azure/storage/filedatalake/__init__.py diff --git a/sdk/storage/azure-storage-file-datalake/_metadata.json b/sdk/storage/azure-storage-file-datalake/_metadata.json new file mode 100644 index 000000000000..f7f9301a86e0 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/_metadata.json @@ -0,0 +1,6 @@ +{ + "apiVersion": "2026-02-06", + "apiVersions": { + "Storage.DataLake": "2026-02-06" + } +} \ No newline at end of file diff --git a/sdk/storage/azure-storage-file-datalake/apiview-properties.json b/sdk/storage/azure-storage-file-datalake/apiview-properties.json new file mode 100644 index 000000000000..b78fb1883b03 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/apiview-properties.json @@ -0,0 +1,69 @@ +{ + "CrossLanguagePackageId": "Storage.DataLake", + "CrossLanguageDefinitionId": { + "azure.storage.filedatalake._generated.models.AclFailedEntry": "Storage.DataLake.AclFailedEntry", + "azure.storage.filedatalake._generated.models.BlobHierarchyListSegment": "Storage.DataLake.BlobHierarchyListSegment", + "azure.storage.filedatalake._generated.models.BlobItemInternal": "Storage.DataLake.BlobItemInternal", + "azure.storage.filedatalake._generated.models.BlobPrefix": "Storage.DataLake.BlobPrefix", + "azure.storage.filedatalake._generated.models.BlobPropertiesInternal": "Storage.DataLake.BlobPropertiesInternal", + 
"azure.storage.filedatalake._generated.models.FileSystemItem": "Storage.DataLake.FileSystemItem", + "azure.storage.filedatalake._generated.models.FileSystemList": "Storage.DataLake.FileSystemList", + "azure.storage.filedatalake._generated.models.ListBlobsHierarchySegmentResponse": "Storage.DataLake.ListBlobsHierarchySegmentResponse", + "azure.storage.filedatalake._generated.models.PathItem": "Storage.DataLake.PathItem", + "azure.storage.filedatalake._generated.models.PathList": "Storage.DataLake.PathList", + "azure.storage.filedatalake._generated.models.SetAccessControlRecursiveResponse": "Storage.DataLake.SetAccessControlRecursiveResponse", + "azure.storage.filedatalake._generated.models.StorageError": "Storage.DataLake.StorageError", + "azure.storage.filedatalake._generated.models.StorageErrorBody": "Storage.DataLake.StorageErrorBody", + "azure.storage.filedatalake._generated.models.AccountResourceType": "Storage.DataLake.AccountResourceType", + "azure.storage.filedatalake._generated.models.FileSystemResourceType": "Storage.DataLake.FileSystemResourceType", + "azure.storage.filedatalake._generated.models.ListBlobsIncludeItem": "Storage.DataLake.ListBlobsIncludeItem", + "azure.storage.filedatalake._generated.models.ListBlobsShowOnly": "Storage.DataLake.ListBlobsShowOnly", + "azure.storage.filedatalake._generated.models.PathResourceType": "Storage.DataLake.PathResourceType", + "azure.storage.filedatalake._generated.models.PathRenameMode": "Storage.DataLake.PathRenameMode", + "azure.storage.filedatalake._generated.models.EncryptionAlgorithmType": "Storage.DataLake.EncryptionAlgorithmType", + "azure.storage.filedatalake._generated.models.PathExpiryOptions": "Storage.DataLake.PathExpiryOptions", + "azure.storage.filedatalake._generated.models.PathUpdateAction": "Storage.DataLake.PathUpdateAction", + "azure.storage.filedatalake._generated.models.PathSetAccessControlRecursiveMode": "Storage.DataLake.PathSetAccessControlRecursiveMode", + 
"azure.storage.filedatalake._generated.models.PathLeaseAction": "Storage.DataLake.PathLeaseAction", + "azure.storage.filedatalake._generated.models.PathGetPropertiesAction": "Storage.DataLake.PathGetPropertiesAction", + "azure.storage.filedatalake._generated.models.LeaseAction": "Storage.DataLake.LeaseAction", + "azure.storage.filedatalake._generated.operations.ServiceOperations.list_file_systems": "Storage.DataLake.Service.listFileSystems", + "azure.storage.filedatalake._generated.aio.operations.ServiceOperations.list_file_systems": "Storage.DataLake.Service.listFileSystems", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.create": "Storage.DataLake.FileSystem.create", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.create": "Storage.DataLake.FileSystem.create", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.set_properties": "Storage.DataLake.FileSystem.setProperties", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.set_properties": "Storage.DataLake.FileSystem.setProperties", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.get_properties": "Storage.DataLake.FileSystem.getProperties", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.get_properties": "Storage.DataLake.FileSystem.getProperties", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.delete": "Storage.DataLake.FileSystem.delete", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.delete": "Storage.DataLake.FileSystem.delete", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.list_paths": "Storage.DataLake.FileSystem.listPaths", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.list_paths": "Storage.DataLake.FileSystem.listPaths", + "azure.storage.filedatalake._generated.operations.FileSystemOperations.list_blob_hierarchy_segment": 
"Storage.DataLake.FileSystem.listBlobHierarchySegment", + "azure.storage.filedatalake._generated.aio.operations.FileSystemOperations.list_blob_hierarchy_segment": "Storage.DataLake.FileSystem.listBlobHierarchySegment", + "azure.storage.filedatalake._generated.operations.PathOperations.create": "Storage.DataLake.Path.create", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.create": "Storage.DataLake.Path.create", + "azure.storage.filedatalake._generated.operations.PathOperations.update": "Storage.DataLake.Path.update", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.update": "Storage.DataLake.Path.update", + "azure.storage.filedatalake._generated.operations.PathOperations.lease": "Storage.DataLake.Path.lease", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.lease": "Storage.DataLake.Path.lease", + "azure.storage.filedatalake._generated.operations.PathOperations.read": "Storage.DataLake.Path.read", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.read": "Storage.DataLake.Path.read", + "azure.storage.filedatalake._generated.operations.PathOperations.get_properties": "Storage.DataLake.Path.getProperties", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.get_properties": "Storage.DataLake.Path.getProperties", + "azure.storage.filedatalake._generated.operations.PathOperations.delete": "Storage.DataLake.Path.delete", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.delete": "Storage.DataLake.Path.delete", + "azure.storage.filedatalake._generated.operations.PathOperations.set_access_control": "Storage.DataLake.Path.setAccessControl", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.set_access_control": "Storage.DataLake.Path.setAccessControl", + "azure.storage.filedatalake._generated.operations.PathOperations.set_access_control_recursive": "Storage.DataLake.Path.setAccessControlRecursive", + 
"azure.storage.filedatalake._generated.aio.operations.PathOperations.set_access_control_recursive": "Storage.DataLake.Path.setAccessControlRecursive", + "azure.storage.filedatalake._generated.operations.PathOperations.flush_data": "Storage.DataLake.Path.flushData", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.flush_data": "Storage.DataLake.Path.flushData", + "azure.storage.filedatalake._generated.operations.PathOperations.append_data": "Storage.DataLake.Path.appendData", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.append_data": "Storage.DataLake.Path.appendData", + "azure.storage.filedatalake._generated.operations.PathOperations.set_expiry": "Storage.DataLake.Path.setExpiry", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.set_expiry": "Storage.DataLake.Path.setExpiry", + "azure.storage.filedatalake._generated.operations.PathOperations.undelete": "Storage.DataLake.Path.undelete", + "azure.storage.filedatalake._generated.aio.operations.PathOperations.undelete": "Storage.DataLake.Path.undelete" + } +} \ No newline at end of file diff --git a/sdk/storage/azure-storage-file-datalake/azure/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/__init__.py index 0d1f7edf5dc6..d55ccad1f573 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/__init__.py index 0d1f7edf5dc6..d55ccad1f573 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/__init__.py @@ -1 +1 @@ -__path__ = __import__('pkgutil').extend_path(__path__, 
__name__) # type: ignore +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index 26bed950dba6..55c5b0f78822 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only import functools -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from urllib.parse import quote, unquote from typing_extensions import Self @@ -84,22 +81,29 @@ class DataLakeDirectoryClient(PathClient): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, directory_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: - super(DataLakeDirectoryClient, self).__init__(account_url, file_system_name, path_name=directory_name, - credential=credential, **kwargs) + super(DataLakeDirectoryClient, self).__init__( + account_url, file_system_name, path_name=directory_name, credential=credential, **kwargs + ) @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, directory_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", 
"TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> Self: """ Create DataLakeDirectoryClient from a Connection String. @@ -135,15 +139,18 @@ def from_connection_string( :return: A DataLakeDirectoryClient. :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls( - account_url, file_system_name=file_system_name, directory_name=directory_name, - credential=credential, **kwargs) + account_url, + file_system_name=file_system_name, + directory_name=directory_name, + credential=credential, + **kwargs, + ) @distributed_trace def create_directory( - self, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + self, metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> Dict[str, Union[str, "datetime"]]: """ Create a new directory. @@ -227,7 +234,7 @@ def create_directory( :dedent: 8 :caption: Create directory. """ - return self._create('directory', metadata=metadata, **kwargs) + return self._create("directory", metadata=metadata, **kwargs) @distributed_trace def delete_directory(self, **kwargs: Any) -> None: @@ -332,11 +339,11 @@ def get_directory_properties(self, **kwargs: Any) -> DirectoryProperties: :dedent: 4 :caption: Getting the properties for a file/directory. 
""" - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers return cast(DirectoryProperties, self._get_path_properties(cls=deserialize_dir_properties, **kwargs)) @distributed_trace @@ -425,21 +432,26 @@ def rename_directory(self, new_name: str, **kwargs: Any) -> "DataLakeDirectoryCl :caption: Rename the source directory. """ new_file_system, new_path, new_dir_sas = _parse_rename_path( - new_name, self.file_system_name, self._query_str, self._raw_credential) + new_name, self.file_system_name, self._query_str, self._raw_credential + ) new_directory_client = DataLakeDirectoryClient( - f"{self.scheme}://{self.primary_hostname}", new_file_system, directory_name=new_path, - credential=self._raw_credential or new_dir_sas, _hosts=self._hosts, _configuration=self._config, - _pipeline=self._pipeline) + f"{self.scheme}://{self.primary_hostname}", + new_file_system, + directory_name=new_path, + credential=self._raw_credential or new_dir_sas, + _hosts=self._hosts, + _configuration=self._config, + _pipeline=self._pipeline, + ) new_directory_client._rename_path( # pylint: disable=protected-access - f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs) + f"/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}", **kwargs + ) return new_directory_client @distributed_trace def create_sub_directory( - self, sub_directory: Union[DirectoryProperties, str], - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + self, sub_directory: Union[DirectoryProperties, str], metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> "DataLakeDirectoryClient": """ Create a subdirectory and return the subdirectory client to be interacted with. 
@@ -524,8 +536,7 @@ def create_sub_directory( @distributed_trace def delete_sub_directory( # pylint: disable=delete-operation-wrong-return-type - self, sub_directory: Union[DirectoryProperties, str], - **kwargs: Any + self, sub_directory: Union[DirectoryProperties, str], **kwargs: Any ) -> "DataLakeDirectoryClient": """ Marks the specified subdirectory for deletion. @@ -661,13 +672,14 @@ def create_file(self, file: Union[FileProperties, str], **kwargs: Any) -> DataLa @distributed_trace def get_paths( - self, *, + self, + *, recursive: bool = True, max_results: Optional[int] = None, upn: Optional[bool] = None, start_from: Optional[str] = None, timeout: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> ItemPaged["PathProperties"]: """Returns a generator to list the paths under specified file system and directory. The generator will lazily follow the continuation tokens returned by the service. @@ -702,15 +714,16 @@ def get_paths( url = f"{self.scheme}://{hostname}/{quote(self.file_system_name)}" client = self._build_generated_client(url) command = functools.partial( - client.file_system.list_paths, - path=self.path_name, - begin_from=start_from, - timeout=timeout, - **kwargs + client.file_system.list_paths, path=self.path_name, begin_from=start_from, timeout=timeout, **kwargs ) return ItemPaged( - command, recursive, path=self.path_name, max_results=max_results, - upn=upn, page_iterator_class=PathPropertiesPaged, **kwargs + command, + recursive, + path=self.path_name, + max_results=max_results, + upn=upn, + page_iterator_class=PathPropertiesPaged, + **kwargs, ) def get_file_client(self, file: Union[FileProperties, str]) -> DataLakeFileClient: @@ -726,18 +739,24 @@ def get_file_client(self, file: Union[FileProperties, str]) -> DataLakeFileClien :rtype: ~azure.storage.filedatalake.DataLakeFileClient """ if isinstance(file, FileProperties): - file_path = file.get('name') + file_path = file.get("name") else: - file_path = self.path_name + '/' + str(file) + 
file_path = self.path_name + "/" + str(file) _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access - policies=self._pipeline._impl_policies # pylint: disable = protected-access + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies, # pylint: disable = protected-access ) return DataLakeFileClient( - self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + self.file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) def get_sub_directory_client(self, sub_directory: Union[DirectoryProperties, str]) -> "DataLakeDirectoryClient": """Get a client to interact with the specified subdirectory of the current directory. 
@@ -752,15 +771,21 @@ def get_sub_directory_client(self, sub_directory: Union[DirectoryProperties, str :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient """ if isinstance(sub_directory, DirectoryProperties): - subdir_path = sub_directory.get('name') + subdir_path = sub_directory.get("name") else: - subdir_path = self.path_name + '/' + str(sub_directory) + subdir_path = self.path_name + "/" + str(sub_directory) _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access ) return DataLakeDirectoryClient( - self.url, self.file_system_name, directory_name=subdir_path, credential=self._raw_credential, + self.url, + self.file_system_name, + directory_name=subdir_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 6641ea74f21f..b783683cfc22 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from datetime import datetime -from typing import ( - Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, - TYPE_CHECKING -) +from typing import Any, AnyStr, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING from urllib.parse import quote, unquote from typing_extensions 
import Self @@ -87,22 +84,29 @@ class DataLakeFileClient(PathClient): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, file_path: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: - super(DataLakeFileClient, self).__init__(account_url, file_system_name, path_name=file_path, - credential=credential, **kwargs) + super(DataLakeFileClient, self).__init__( + account_url, file_system_name, path_name=file_path, credential=credential, **kwargs + ) @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, file_path: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> Self: """ Create DataLakeFileClient from a Connection String. @@ -137,16 +141,15 @@ def from_connection_string( :returns: A DataLakeFileClient. 
:rtype: ~azure.storage.filedatalake.DataLakeFileClient """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') - return cls( - account_url, file_system_name=file_system_name, file_path=file_path, - credential=credential, **kwargs) + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") + return cls(account_url, file_system_name=file_system_name, file_path=file_path, credential=credential, **kwargs) @distributed_trace def create_file( - self, content_settings: Optional["ContentSettings"] = None, + self, + content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Union[str, datetime]]: """ Create a new file. @@ -240,7 +243,7 @@ def create_file( :dedent: 4 :caption: Create file. """ - return self._create('file', content_settings=content_settings, metadata=metadata, **kwargs) + return self._create("file", content_settings=content_settings, metadata=metadata, **kwargs) @distributed_trace def delete_file(self, **kwargs: Any) -> None: @@ -343,18 +346,16 @@ def get_file_properties(self, **kwargs: Any) -> FileProperties: :dedent: 4 :caption: Getting the properties for a file. """ - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers return cast(FileProperties, self._get_path_properties(cls=deserialize_file_properties, **kwargs)) @distributed_trace def set_file_expiry( - self, expiry_options: str, - expires_on: Optional[Union[datetime, int]] = None, - **kwargs: Any + self, expiry_options: str, expires_on: Optional[Union[datetime, int]] = None, **kwargs: Any ) -> None: """Sets the time a file will expire and be deleted. 
@@ -382,10 +383,11 @@ def set_file_expiry( @distributed_trace def upload_data( - self, data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], + self, + data: Union[bytes, str, Iterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: """ Upload data to a file. @@ -469,29 +471,20 @@ def upload_data( :rtype: Dict[str, Any] """ options = _upload_options( - data, - self.scheme, - self._config, - self._client.path, - length=length, - overwrite=overwrite, - **kwargs + data, self.scheme, self._config, self._client.path, length=length, overwrite=overwrite, **kwargs ) return upload_datalake_file(**options) @distributed_trace def append_data( - self, data: Union[bytes, Iterable[bytes], IO[bytes]], - offset: int, - length: Optional[int] = None, - **kwargs: Any + self, data: Union[bytes, Iterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Append data to the file. :param data: Content to be appended to file :type data: Union[bytes, Iterable[bytes], IO[bytes]] :param int offset: start position of the data to be appended to. - :param length: + :param length: Size of the data to append. Optional if the length of data can be determined. For Iterable and IO, if the length is not provided and cannot be determined, all data will be read into memory. :type length: int or None @@ -547,23 +540,14 @@ def append_data( :dedent: 4 :caption: Append data to the file. 
""" - options = _append_data_options( - data=data, - offset=offset, - scheme=self.scheme, - length=length, - **kwargs) + options = _append_data_options(data=data, offset=offset, scheme=self.scheme, length=length, **kwargs) try: return self._client.path.append_data(**options) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def flush_data( - self, offset: int, - retain_uncommitted_data: Optional[bool] = False, - **kwargs: Any - ) -> Dict[str, Any]: + def flush_data(self, offset: int, retain_uncommitted_data: Optional[bool] = False, **kwargs: Any) -> Dict[str, Any]: """Commit the previous appended data. :param int offset: offset is equal to the length of the file after commit @@ -651,12 +635,7 @@ def flush_data( :dedent: 8 :caption: Commit the previous appended data. """ - options = _flush_data_options( - offset, - self.scheme, - retain_uncommitted_data=retain_uncommitted_data, - **kwargs - ) + options = _flush_data_options(offset, self.scheme, retain_uncommitted_data=retain_uncommitted_data, **kwargs) try: return self._client.path.flush_data(**options) except HttpResponseError as error: @@ -664,9 +643,7 @@ def flush_data( @distributed_trace def download_file( - self, offset: Optional[int] = None, - length: Optional[int] = None, - **kwargs: Any + self, offset: Optional[int] = None, length: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader: """Downloads a file to the StorageStreamDownloader. The readall() method must be used to read all the content, or readinto() must be used to download the file into @@ -821,16 +798,22 @@ def rename_file(self, new_name: str, **kwargs: Any) -> "DataLakeFileClient": :caption: Rename the source file. 
""" new_file_system, new_path, new_file_sas = _parse_rename_path( - new_name, self.file_system_name, self._query_str, self._raw_credential) + new_name, self.file_system_name, self._query_str, self._raw_credential + ) new_file_client = DataLakeFileClient( - f"{self.scheme}://{self.primary_hostname}", new_file_system, file_path=new_path, + f"{self.scheme}://{self.primary_hostname}", + new_file_system, + file_path=new_path, credential=self._raw_credential or new_file_sas, - _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, - _location_mode=self._location_mode + _hosts=self._hosts, + _configuration=self._config, + _pipeline=self._pipeline, + _location_mode=self._location_mode, ) new_file_client._rename_path( # pylint: disable=protected-access - f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs) + f"/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}", **kwargs + ) return new_file_client @distributed_trace @@ -906,8 +889,7 @@ def query_file(self, query_expression: str, **kwargs: Any) -> DataLakeFileQueryR :caption: select/project on datalake file data by providing simple query expressions. 
""" query_expression = query_expression.replace("from DataLakeStorage", "from BlobStorage") - blob_quick_query_reader = self._blob_client.query_blob(query_expression, - blob_format=kwargs.pop('file_format', None), - error_cls=DataLakeFileQueryError, - **kwargs) + blob_quick_query_reader = self._blob_client.query_blob( + query_expression, blob_format=kwargs.pop("file_format", None), error_cls=DataLakeFileQueryError, **kwargs + ) return DataLakeFileQueryReader(blob_quick_query_reader) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py index 86a0a521d15d..dd5015773c07 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client_helpers.py @@ -6,8 +6,16 @@ from io import BytesIO from typing import ( - Any, AnyStr, AsyncGenerator, AsyncIterable, cast, - Dict, IO, Iterable, Optional, Union, + Any, + AnyStr, + AsyncGenerator, + AsyncIterable, + cast, + Dict, + IO, + Iterable, + Optional, + Union, TYPE_CHECKING, ) @@ -17,7 +25,7 @@ get_cpk_info, get_lease_action_properties, get_mod_conditions, - get_path_http_headers + get_path_http_headers, ) from ._shared.constants import DEFAULT_MAX_CONCURRENCY from ._shared.request_handlers import get_length, read_length @@ -36,10 +44,10 @@ def _append_data_options( offset: int, scheme: str, length: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: if isinstance(data, str): - data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + data = data.encode(kwargs.pop("encoding", "UTF-8")) # type: ignore if length is None: length = get_length(data) if length is None: @@ -51,13 +59,13 @@ def _append_data_options( kwargs.update(get_lease_action_properties(kwargs)) options = { - 'body': data, - 
'position': offset, - 'content_length': length, - 'validate_content': kwargs.pop('validate_content', False), - 'cpk_info': cpk_info, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_response_headers + "body": data, + "position": offset, + "content_length": length, + "validate_content": kwargs.pop("validate_content", False), + "cpk_info": cpk_info, + "timeout": kwargs.pop("timeout", None), + "cls": return_response_headers, } options.update(kwargs) return options @@ -68,7 +76,7 @@ def _flush_data_options( scheme: str, content_settings: Optional["ContentSettings"] = None, retain_uncommitted_data: Optional[bool] = False, - **kwargs + **kwargs, ) -> Dict[str, Any]: mod_conditions = get_mod_conditions(kwargs) @@ -80,15 +88,15 @@ def _flush_data_options( kwargs.update(get_lease_action_properties(kwargs)) options = { - 'position': offset, - 'content_length': 0, - 'path_http_headers': path_http_headers, - 'retain_uncommitted_data': retain_uncommitted_data, - 'close': kwargs.pop('close', False), - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_response_headers + "position": offset, + "content_length": 0, + "path_http_headers": path_http_headers, + "retain_uncommitted_data": retain_uncommitted_data, + "close": kwargs.pop("close", False), + "modified_access_conditions": mod_conditions, + "cpk_info": cpk_info, + "timeout": kwargs.pop("timeout", None), + "cls": return_response_headers, } options.update(kwargs) return options @@ -100,9 +108,9 @@ def _upload_options( config: "StorageConfiguration", path: "PathOperations", length: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: - encoding = kwargs.pop('encoding', 'UTF-8') + encoding = kwargs.pop("encoding", "UTF-8") if isinstance(data, str): data = data.encode(encoding) if length is None: @@ -113,35 +121,35 @@ def _upload_options( stream: Optional[Any] = None if isinstance(data, bytes): stream = BytesIO(data) - elif 
hasattr(data, 'read'): + elif hasattr(data, "read"): stream = data - elif hasattr(data, '__iter__'): + elif hasattr(data, "__iter__"): stream = IterStreamer(data, encoding=encoding) - elif hasattr(data, '__aiter__'): + elif hasattr(data, "__aiter__"): stream = AsyncIterStreamer(cast(AsyncGenerator, data), encoding=encoding) else: raise TypeError(f"Unsupported data type: {type(data)}") - validate_content = kwargs.pop('validate_content', False) - content_settings = kwargs.pop('content_settings', None) - metadata = kwargs.pop('metadata', None) - max_concurrency = kwargs.pop('max_concurrency', None) + validate_content = kwargs.pop("validate_content", False) + content_settings = kwargs.pop("content_settings", None) + metadata = kwargs.pop("metadata", None) + max_concurrency = kwargs.pop("max_concurrency", None) if max_concurrency is None: max_concurrency = DEFAULT_MAX_CONCURRENCY - kwargs['properties'] = add_metadata_headers(metadata) - kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) - kwargs['modified_access_conditions'] = get_mod_conditions(kwargs) - kwargs['cpk_info'] = get_cpk_info(scheme, kwargs) + kwargs["properties"] = add_metadata_headers(metadata) + kwargs["lease_access_conditions"] = get_access_conditions(kwargs.pop("lease", None)) + kwargs["modified_access_conditions"] = get_mod_conditions(kwargs) + kwargs["cpk_info"] = get_cpk_info(scheme, kwargs) if content_settings: - kwargs['path_http_headers'] = get_path_http_headers(content_settings) - - kwargs['stream'] = stream - kwargs['length'] = length - kwargs['validate_content'] = validate_content - kwargs['max_concurrency'] = max_concurrency - kwargs['client'] = path - kwargs['file_settings'] = config + kwargs["path_http_headers"] = get_path_http_headers(content_settings) + + kwargs["stream"] = stream + kwargs["length"] = length + kwargs["validate_content"] = validate_content + kwargs["max_concurrency"] = max_concurrency + kwargs["client"] = path + kwargs["file_settings"] = 
config return kwargs diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.py index 01ae48891ccc..40efc6d421ae 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -6,10 +7,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only import uuid -from typing import ( - Union, Optional, Any, - TYPE_CHECKING -) +from typing import Union, Optional, Any, TYPE_CHECKING from typing_extensions import Self from azure.core.tracing.decorator import distributed_trace @@ -49,16 +47,17 @@ class DataLakeLeaseClient: # pylint: disable=client-accepts-api-version-keyword This will be `None` if no lease has yet been acquired or modified.""" def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs - self, client: Union["FileSystemClient", "DataLakeDirectoryClient", "DataLakeFileClient"], - lease_id: Optional[str] = None + self, + client: Union["FileSystemClient", "DataLakeDirectoryClient", "DataLakeFileClient"], + lease_id: Optional[str] = None, ) -> None: self.id = lease_id or str(uuid.uuid4()) self.last_modified = None self.etag = None - if hasattr(client, '_blob_client'): + if hasattr(client, "_blob_client"): _client = client._blob_client - elif hasattr(client, '_container_client'): + elif hasattr(client, "_container_client"): _client = client._container_client else: raise TypeError("Lease must use any of FileSystemClient, DataLakeDirectoryClient, or 
DataLakeFileClient.") diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.pyi index d9ee9df15e6a..b6ba38048762 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_lease.pyi @@ -7,7 +7,9 @@ from datetime import datetime from typing import ( - Any, Optional, Union, + Any, + Optional, + Union, ) from types import TracebackType from typing_extensions import Self @@ -18,7 +20,6 @@ from ._file_system_client import FileSystemClient from ._data_lake_directory_client import DataLakeDirectoryClient from ._data_lake_file_client import DataLakeFileClient - class DataLakeLeaseClient: id: str etag: Optional[str] @@ -26,7 +27,7 @@ class DataLakeLeaseClient: def __init__( self, client: Union[FileSystemClient, DataLakeDirectoryClient, DataLakeFileClient], - lease_id: Optional[str] = None + lease_id: Optional[str] = None, ) -> None: ... def __enter__(self) -> Self: ... 
def __exit__( diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py index b30a532af67e..012cf1641b53 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py @@ -5,10 +5,7 @@ # -------------------------------------------------------------------------- # pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.paging import ItemPaged @@ -27,7 +24,7 @@ FileSystemProperties, FileSystemPropertiesPaged, LocationMode, - UserDelegationKey + UserDelegationKey, ) from ._serialize import convert_dfs_url_to_blob_url, get_api_version from ._shared.base_client import parse_connection_str, parse_query, StorageAccountHostsMixin, TransportWrapper @@ -97,8 +94,11 @@ class DataLakeServiceClient(StorageAccountHostsMixin): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + self, + account_url: str, + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> None: parsed_url = _parse_url(account_url=account_url) @@ -111,8 +111,9 @@ def __init__( _, sas_token = parse_query(parsed_url.query) self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) - super(DataLakeServiceClient, self).__init__(parsed_url, service='dfs', - 
credential=self._raw_credential, **kwargs) + super(DataLakeServiceClient, self).__init__( + parsed_url, service="dfs", credential=self._raw_credential, **kwargs + ) # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" @@ -149,8 +150,11 @@ def _format_url(self, hostname: str) -> str: @classmethod def from_connection_string( - cls, conn_str: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + cls, + conn_str: str, + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> Self: """ @@ -188,12 +192,13 @@ def from_connection_string( :dedent: 8 :caption: Creating the DataLakeServiceClient from a connection string. """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls(account_url, credential=credential, **kwargs) @distributed_trace def get_user_delegation_key( - self, key_start_time: "datetime", + self, + key_start_time: "datetime", key_expiry_time: "datetime", *, delegated_user_tid: Optional[str] = None, @@ -236,9 +241,7 @@ def get_user_delegation_key( @distributed_trace def list_file_systems( - self, name_starts_with: Optional[str] = None, - include_metadata: bool = False, - **kwargs: Any + self, name_starts_with: Optional[str] = None, include_metadata: bool = False, **kwargs: Any ) -> ItemPaged[FileSystemProperties]: """Returns a generator to list the file systems under the specified account. @@ -280,16 +283,15 @@ def list_file_systems( :caption: Listing the file systems in the datalake service. 
""" item_paged = self._blob_service_client.list_containers( - name_starts_with=name_starts_with, - include_metadata=include_metadata, - **kwargs + name_starts_with=name_starts_with, include_metadata=include_metadata, **kwargs ) item_paged._page_iterator_class = FileSystemPropertiesPaged # pylint: disable=protected-access return cast(ItemPaged[FileSystemProperties], item_paged) @distributed_trace def create_file_system( - self, file_system: Union[FileSystemProperties, str], + self, + file_system: Union[FileSystemProperties, str], metadata: Optional[Dict[str, str]] = None, public_access: Optional["PublicAccess"] = None, **kwargs: Any @@ -360,7 +362,7 @@ def _rename_file_system(self, name: str, new_name: str, **kwargs: Any) -> FileSy :returns: A FileSystemClient with the specified file system renamed. :rtype: ~azure.storage.filedatalake.FileSystemClient """ - self._blob_service_client._rename_container(name, new_name, **kwargs) # pylint: disable=protected-access + self._blob_service_client._rename_container(name, new_name, **kwargs) # pylint: disable=protected-access renamed_file_system = self.get_file_system_client(new_name) return renamed_file_system @@ -387,14 +389,15 @@ def undelete_file_system(self, name: str, deleted_version: str, **kwargs: Any) - :returns: The restored solft-deleted FileSystemClient. 
:rtype: ~azure.storage.filedatalake.FileSystemClient """ - new_name = kwargs.pop('new_name', None) + new_name = kwargs.pop("new_name", None) file_system = self.get_file_system_client(new_name or name) - self._blob_service_client.undelete_container( - name, deleted_version, new_name=new_name, **kwargs) + self._blob_service_client.undelete_container(name, deleted_version, new_name=new_name, **kwargs) return file_system @distributed_trace - def delete_file_system(self, file_system: Union[FileSystemProperties, str], **kwargs: Any) -> FileSystemClient: # pylint: disable=delete-operation-wrong-return-type + def delete_file_system( + self, file_system: Union[FileSystemProperties, str], **kwargs: Any + ) -> FileSystemClient: # pylint: disable=delete-operation-wrong-return-type """Marks the specified file system for deletion. The file system and any files contained within it are later deleted during garbage collection. @@ -476,16 +479,20 @@ def get_file_system_client(self, file_system: Union[FileSystemProperties, str]) _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access + ) + return FileSystemClient( + self.url, + file_system_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, ) - return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, - _pipeline=_pipeline, _hosts=self._hosts) def get_directory_client( - self, file_system: Union[FileSystemProperties, str], - directory: Union[DirectoryProperties, str] + self, file_system: Union[FileSystemProperties, str], directory: Union[DirectoryProperties, str] ) -> DataLakeDirectoryClient: """Get a client to interact with the specified 
directory. @@ -522,17 +529,21 @@ def get_directory_client( _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access + ) + return DataLakeDirectoryClient( + self.url, + file_system_name, + directory_name=directory_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, ) - return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, - credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, _pipeline=_pipeline, - _hosts=self._hosts) def get_file_client( - self, file_system: Union[FileSystemProperties, str], - file_path: Union[FileProperties, str] + self, file_system: Union[FileSystemProperties, str], file_path: Union[FileProperties, str] ) -> DataLakeFileClient: """Get a client to interact with the specified file. 
@@ -569,12 +580,18 @@ def get_file_client( _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access ) return DataLakeFileClient( - self.url, file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) @distributed_trace def set_service_properties(self, **kwargs: Any) -> None: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client_helpers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client_helpers.py index 600302e39c44..cc381dc581e9 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client_helpers.py @@ -13,11 +13,11 @@ def _parse_url(account_url: str) -> "ParseResult": try: - if not account_url.lower().startswith('http'): + if not account_url.lower().startswith("http"): account_url = "https://" + account_url except AttributeError as exc: raise ValueError("Account URL must be a string.") from exc - parsed_url = urlparse(account_url.rstrip('/')) + parsed_url = urlparse(account_url.rstrip("/")) if not parsed_url.netloc: raise ValueError(f"Invalid URL: {account_url}") return parsed_url diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py index c758060a65e3..4283701dcc38 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_deserialize.py @@ -4,10 +4,7 @@ # license information. # -------------------------------------------------------------------------- import logging -from typing import ( - Any, cast, Collection, Dict, List, NoReturn, Tuple, - TYPE_CHECKING -) +from typing import Any, cast, Collection, Dict, List, NoReturn, Tuple, TYPE_CHECKING from xml.etree.ElementTree import Element from azure.core.pipeline.policies import ContentDecodePolicy @@ -17,7 +14,7 @@ ResourceModifiedError, ClientAuthenticationError, ResourceNotFoundError, - ResourceExistsError + ResourceExistsError, ) from ._models import ( AnalyticsLogging, @@ -28,7 +25,7 @@ Metrics, PathProperties, RetentionPolicy, - StaticWebsite + StaticWebsite, ) from ._shared.models import StorageErrorCode from ._shared.response_handlers import deserialize_metadata @@ -36,52 +33,40 @@ if TYPE_CHECKING: from azure.core.rest import HttpResponse from azure.storage.blob import BlobProperties - from ._generated.models import ( - BlobItemInternal, - Path, - PathList - ) + from ._generated.models import BlobItemInternal, Path, PathList from ._models import ContentSettings _LOGGER = logging.getLogger(__name__) -def deserialize_dir_properties( - response: "HttpResponse", - obj: Any, - headers: Dict[str, Any] -) -> DirectoryProperties: +def deserialize_dir_properties(response: "HttpResponse", obj: Any, headers: Dict[str, Any]) -> DirectoryProperties: metadata = deserialize_metadata(response, obj, headers) dir_properties = DirectoryProperties( metadata=metadata, - owner=response.headers.get('x-ms-owner'), - group=response.headers.get('x-ms-group'), - permissions=response.headers.get('x-ms-permissions'), - acl=response.headers.get('x-ms-acl'), - **headers + owner=response.headers.get("x-ms-owner"), + group=response.headers.get("x-ms-group"), + 
permissions=response.headers.get("x-ms-permissions"), + acl=response.headers.get("x-ms-acl"), + **headers, ) return dir_properties -def deserialize_file_properties( - response: "HttpResponse", - obj: Any, - headers: Dict[str, Any] -) -> FileProperties: +def deserialize_file_properties(response: "HttpResponse", obj: Any, headers: Dict[str, Any]) -> FileProperties: metadata = deserialize_metadata(response, obj, headers) # DataLake specific headers that are not deserialized in blob are pulled directly from the raw response header file_properties = FileProperties( metadata=metadata, - encryption_context=response.headers.get('x-ms-encryption-context'), - owner=response.headers.get('x-ms-owner'), - group=response.headers.get('x-ms-group'), - permissions=response.headers.get('x-ms-permissions'), - acl=response.headers.get('x-ms-acl'), - **headers + encryption_context=response.headers.get("x-ms-encryption-context"), + owner=response.headers.get("x-ms-owner"), + group=response.headers.get("x-ms-group"), + permissions=response.headers.get("x-ms-permissions"), + acl=response.headers.get("x-ms-acl"), + **headers, ) - if 'Content-Range' in headers: - if 'x-ms-blob-content-md5' in headers: - file_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + if "Content-Range" in headers: + if "x-ms-blob-content-md5" in headers: + file_properties.content_settings.content_md5 = headers["x-ms-blob-content-md5"] else: file_properties.content_settings.content_md5 = None return file_properties @@ -92,14 +77,14 @@ def deserialize_path_properties(path_list: List["Path"]) -> List[PathProperties] def return_headers_and_deserialized_path_list( # pylint: disable=name-too-long, unused-argument - _, - deserialized: "PathList", - response_headers: Dict[str, Any] + _, deserialized: "PathList", response_headers: Dict[str, Any] ) -> Tuple[Collection["Path"], Dict[str, Any]]: return deserialized.paths if deserialized.paths else {}, normalize_headers(response_headers) -def 
get_deleted_path_properties_from_generated_code(generated: "BlobItemInternal") -> DeletedPathProperties: # pylint: disable=name-too-long +def get_deleted_path_properties_from_generated_code( + generated: "BlobItemInternal", +) -> DeletedPathProperties: # pylint: disable=name-too-long deleted_path = DeletedPathProperties() deleted_path.name = generated.name deleted_path.deleted_time = generated.properties.deleted_time @@ -109,19 +94,27 @@ def get_deleted_path_properties_from_generated_code(generated: "BlobItemInternal def is_file_path(_, __, headers: Dict[str, Any]) -> bool: - return headers['x-ms-resource-type'] == "file" + return headers["x-ms-resource-type"] == "file" def get_datalake_service_properties(datalake_properties: Dict[str, Any]) -> Dict[str, Any]: datalake_properties["analytics_logging"] = AnalyticsLogging._from_generated( # pylint: disable=protected-access - datalake_properties["analytics_logging"]) - datalake_properties["hour_metrics"] = Metrics._from_generated(datalake_properties["hour_metrics"]) # pylint: disable=protected-access + datalake_properties["analytics_logging"] + ) + datalake_properties["hour_metrics"] = Metrics._from_generated( + datalake_properties["hour_metrics"] + ) # pylint: disable=protected-access datalake_properties["minute_metrics"] = Metrics._from_generated( # pylint: disable=protected-access - datalake_properties["minute_metrics"]) - datalake_properties["delete_retention_policy"] = RetentionPolicy._from_generated( # pylint: disable=protected-access - datalake_properties["delete_retention_policy"]) + datalake_properties["minute_metrics"] + ) + datalake_properties["delete_retention_policy"] = ( + RetentionPolicy._from_generated( # pylint: disable=protected-access + datalake_properties["delete_retention_policy"] + ) + ) datalake_properties["static_website"] = StaticWebsite._from_generated( # pylint: disable=protected-access - datalake_properties["static_website"]) + datalake_properties["static_website"] + ) return 
datalake_properties @@ -141,11 +134,11 @@ def from_blob_properties(blob_properties: "BlobProperties", **additional_args: A file_props.content_settings = cast("ContentSettings", blob_properties.content_settings) # Parse additional Datalake-only properties - file_props.encryption_context = additional_args.pop('encryption_context', None) - file_props.owner = additional_args.pop('owner', None) - file_props.group = additional_args.pop('group', None) - file_props.permissions = additional_args.pop('permissions', None) - file_props.acl = additional_args.pop('acl', None) + file_props.encryption_context = additional_args.pop("encryption_context", None) + file_props.owner = additional_args.pop("owner", None) + file_props.group = additional_args.pop("group", None) + file_props.permissions = additional_args.pop("permissions", None) + file_props.acl = additional_args.pop("acl", None) return file_props @@ -153,9 +146,9 @@ def from_blob_properties(blob_properties: "BlobProperties", **additional_args: A def normalize_headers(headers: Dict[str, Any]) -> Dict[str, Any]: normalized = {} for key, value in headers.items(): - if key.startswith('x-ms-'): + if key.startswith("x-ms-"): key = key[5:] - normalized[key.lower().replace('-', '_')] = value + normalized[key.lower().replace("-", "_")] = value return normalized @@ -167,7 +160,7 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p # If it is one of those three then it has been serialized prior by the generated layer. 
if isinstance(storage_error, (ResourceNotFoundError, ClientAuthenticationError, ResourceExistsError)): serialized = True - error_code = storage_error.response.headers.get('x-ms-error-code') + error_code = storage_error.response.headers.get("x-ms-error-code") error_message = storage_error.message additional_data = {} error_dict = {} @@ -175,23 +168,21 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) # If it is an XML response if isinstance(error_body, Element): - error_dict = { - child.tag.lower(): child.text - for child in error_body - } + error_dict = {child.tag.lower(): child.text for child in error_body} # If it is a JSON response elif isinstance(error_body, dict): - error_dict = error_body.get('error', {}) + error_dict = error_body.get("error", {}) elif not error_code: _LOGGER.warning( - 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) - error_dict = {'message': str(error_body)} + "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", type(error_body) + ) + error_dict = {"message": str(error_body)} # If we extracted from a Json or XML response if error_dict: - error_code = error_dict.get('code') - error_message = error_dict.get('message') - additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} + error_code = error_dict.get("code") + error_message = error_dict.get("message") + additional_data = {k: v for k, v in error_dict.items() if k not in {"code", "message"}} except DecodeError: pass @@ -202,33 +193,36 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p error_code = StorageErrorCode(error_code) if error_code in [StorageErrorCode.condition_not_met]: raise_error = ResourceModifiedError - if error_code in [StorageErrorCode.invalid_authentication_info, - StorageErrorCode.authentication_failed]: 
+ if error_code in [StorageErrorCode.invalid_authentication_info, StorageErrorCode.authentication_failed]: raise_error = ClientAuthenticationError - if error_code in [StorageErrorCode.resource_not_found, - StorageErrorCode.invalid_property_name, - StorageErrorCode.invalid_source_uri, - StorageErrorCode.source_path_not_found, - StorageErrorCode.lease_name_mismatch, - StorageErrorCode.file_system_not_found, - StorageErrorCode.path_not_found, - StorageErrorCode.parent_not_found, - StorageErrorCode.invalid_destination_path, - StorageErrorCode.invalid_rename_source_path, - StorageErrorCode.lease_is_already_broken, - StorageErrorCode.invalid_source_or_destination_resource_type, - StorageErrorCode.rename_destination_parent_path_not_found]: + if error_code in [ + StorageErrorCode.resource_not_found, + StorageErrorCode.invalid_property_name, + StorageErrorCode.invalid_source_uri, + StorageErrorCode.source_path_not_found, + StorageErrorCode.lease_name_mismatch, + StorageErrorCode.file_system_not_found, + StorageErrorCode.path_not_found, + StorageErrorCode.parent_not_found, + StorageErrorCode.invalid_destination_path, + StorageErrorCode.invalid_rename_source_path, + StorageErrorCode.lease_is_already_broken, + StorageErrorCode.invalid_source_or_destination_resource_type, + StorageErrorCode.rename_destination_parent_path_not_found, + ]: raise_error = ResourceNotFoundError - if error_code in [StorageErrorCode.account_already_exists, - StorageErrorCode.account_being_created, - StorageErrorCode.resource_already_exists, - StorageErrorCode.resource_type_mismatch, - StorageErrorCode.source_path_is_being_deleted, - StorageErrorCode.path_already_exists, - StorageErrorCode.destination_path_is_being_deleted, - StorageErrorCode.file_system_already_exists, - StorageErrorCode.file_system_being_deleted, - StorageErrorCode.path_conflict]: + if error_code in [ + StorageErrorCode.account_already_exists, + StorageErrorCode.account_being_created, + StorageErrorCode.resource_already_exists, + 
StorageErrorCode.resource_type_mismatch, + StorageErrorCode.source_path_is_being_deleted, + StorageErrorCode.path_already_exists, + StorageErrorCode.destination_path_is_being_deleted, + StorageErrorCode.file_system_already_exists, + StorageErrorCode.file_system_being_deleted, + StorageErrorCode.path_conflict, + ]: raise_error = ResourceExistsError except ValueError: # Got an unknown error code @@ -256,6 +250,6 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p try: # `from None` prevents us from double printing the exception (suppresses generated layer error context) - exec("raise error from None") # pylint: disable=exec-used # nosec + exec("raise error from None") # pylint: disable=exec-used # nosec except SyntaxError as exc: raise error from exc diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py index f8b6aa3fbd3c..5c20cb1731c0 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_download.py @@ -3,10 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -from typing import ( - Any, cast, IO, Iterator, - TYPE_CHECKING -) +from typing import Any, cast, IO, Iterator, TYPE_CHECKING from ._deserialize import from_blob_properties @@ -31,13 +28,12 @@ def __init__(self, downloader: Any) -> None: self.name = self._downloader.name # Parse additional Datalake-only properties - encryption_context = self._downloader._response.response.headers.get('x-ms-encryption-context') - acl = self._downloader._response.response.headers.get('x-ms-acl') + encryption_context = self._downloader._response.response.headers.get("x-ms-encryption-context") + acl = self._downloader._response.response.headers.get("x-ms-acl") self.properties = from_blob_properties( - self._downloader.properties, - encryption_context=encryption_context, - acl=acl) + self._downloader.properties, encryption_context=encryption_context, acl=acl + ) self.size = self._downloader.size def __len__(self) -> int: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index 8a5f6d643152..b28d84f8563a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for @@ -6,10 +7,7 @@ # pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only import functools -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.exceptions import HttpResponseError @@ -30,7 +28,7 @@ FileProperties, FileSystemProperties, LocationMode, - PublicAccess + PublicAccess, ) from ._shared.base_client import parse_connection_str, parse_query, TransportWrapper, StorageAccountHostsMixin from ._serialize import convert_dfs_url_to_blob_url, get_api_version @@ -93,10 +91,13 @@ class FileSystemClient(StorageAccountHostsMixin): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: if not file_system_name: raise ValueError("Please specify a file system name.") @@ -107,24 +108,21 @@ def __init__( # TODO: add self.account_url to base_client and remove _blob_account_url self._blob_account_url = blob_account_url - datalake_hosts = kwargs.pop('_hosts', None) + datalake_hosts = kwargs.pop("_hosts", None) blob_hosts = None if datalake_hosts: blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY]) blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""} self._container_client = ContainerClient( - self._blob_account_url, - self.file_system_name, - credential=credential, - _hosts=blob_hosts, - **kwargs + self._blob_account_url, self.file_system_name, 
credential=credential, _hosts=blob_hosts, **kwargs ) _, sas_token = parse_query(parsed_url.query) self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) - super(FileSystemClient, self).__init__(parsed_url, service='dfs', credential=self._raw_credential, - _hosts=datalake_hosts, **kwargs) + super(FileSystemClient, self).__init__( + parsed_url, service="dfs", credential=self._raw_credential, _hosts=datalake_hosts, **kwargs + ) # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" @@ -156,10 +154,7 @@ def close(self) -> None: def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( - url, - base_url=url, - file_system=self.file_system_name, - pipeline=self._pipeline + url, base_url=url, file_system=self.file_system_name, pipeline=self._pipeline ) client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client @@ -169,10 +164,13 @@ def _format_url(self, hostname: str) -> str: @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> Self: """ Create FileSystemClient from a Connection String. 
@@ -210,14 +208,12 @@ def from_connection_string( :dedent: 8 :caption: Create FileSystemClient from connection string """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls(account_url, file_system_name=file_system_name, credential=credential, **kwargs) @distributed_trace def acquire_lease( - self, lease_duration: int = -1, - lease_id: Optional[str] = None, - **kwargs: Any + self, lease_duration: int = -1, lease_id: Optional[str] = None, **kwargs: Any ) -> DataLakeLeaseClient: """ Requests a new lease. If the file system does not have an active lease, @@ -273,9 +269,7 @@ def acquire_lease( @distributed_trace def create_file_system( - self, metadata: Optional[Dict[str, str]] = None, - public_access: Optional[PublicAccess] = None, - **kwargs: Any + self, metadata: Optional[Dict[str, str]] = None, public_access: Optional[PublicAccess] = None, **kwargs: Any ) -> Dict[str, Union[str, "datetime"]]: """Creates a new file system under the specified account. @@ -315,12 +309,12 @@ def create_file_system( :dedent: 12 :caption: Creating a file system in the datalake service. 
""" - encryption_scope_options = kwargs.pop('encryption_scope_options', None) + encryption_scope_options = kwargs.pop("encryption_scope_options", None) return self._container_client.create_container( metadata=metadata, public_access=public_access, container_encryption_scope=encryption_scope_options, - **kwargs + **kwargs, ) @distributed_trace @@ -360,11 +354,17 @@ def _rename_file_system(self, new_name: str, **kwargs: Any) -> "FileSystemClient :rtype: ~azure.storage.filedatalake.FileSystemClient """ self._container_client._rename_container(new_name, **kwargs) # pylint: disable=protected-access - #TODO: self._raw_credential would not work with SAS tokens + # TODO: self._raw_credential would not work with SAS tokens renamed_file_system = FileSystemClient( - f"{self.scheme}://{self.primary_hostname}", file_system_name=new_name, - credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, - _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts) + f"{self.scheme}://{self.primary_hostname}", + file_system_name=new_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=self._pipeline, + _location_mode=self._location_mode, + _hosts=self._hosts, + ) return renamed_file_system @distributed_trace @@ -441,13 +441,12 @@ def get_file_system_properties(self, **kwargs: Any) -> FileSystemProperties: :caption: Getting properties on the file system. 
""" container_properties = self._container_client.get_container_properties(**kwargs) - return FileSystemProperties._convert_from_container_props(container_properties) # pylint: disable=protected-access + return FileSystemProperties._convert_from_container_props( + container_properties + ) # pylint: disable=protected-access @distributed_trace - def set_file_system_metadata( - self, metadata: Dict[str, str], - **kwargs: Any - ) -> Dict[str, Union[str, "datetime"]]: + def set_file_system_metadata(self, metadata: Dict[str, str], **kwargs: Any) -> Dict[str, Union[str, "datetime"]]: """Sets one or more user-defined name-value pairs for the specified file system. Each call to this operation replaces all existing metadata attached to the file system. To remove all metadata from the file system, @@ -499,9 +498,10 @@ def set_file_system_metadata( @distributed_trace def set_file_system_access_policy( - self, signed_identifiers: Dict[str, "AccessPolicy"], + self, + signed_identifiers: Dict[str, "AccessPolicy"], public_access: Optional[Union[str, "PublicAccess"]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Union[str, "datetime"]]: """Sets the permissions for the specified file system or stored access policies that may be used with Shared Access Signatures. 
The permissions @@ -540,9 +540,7 @@ def set_file_system_access_policy( :rtype: Dict[str, Union[str, ~datetime.datetime]] """ return self._container_client.set_container_access_policy( - cast(Dict[str, "BlobAccessPolicy"], signed_identifiers), - public_access=public_access, - **kwargs + cast(Dict[str, "BlobAccessPolicy"], signed_identifiers), public_access=public_access, **kwargs ) @distributed_trace @@ -565,16 +563,19 @@ def get_file_system_access_policy(self, **kwargs: Any) -> Dict[str, Any]: """ access_policy = self._container_client.get_container_access_policy(**kwargs) return { - 'public_access': PublicAccess._from_generated(access_policy['public_access']), # pylint: disable=protected-access - 'signed_identifiers': access_policy['signed_identifiers'] + "public_access": PublicAccess._from_generated( + access_policy["public_access"] + ), # pylint: disable=protected-access + "signed_identifiers": access_policy["signed_identifiers"], } @distributed_trace def get_paths( - self, path: Optional[str] = None, + self, + path: Optional[str] = None, recursive: Optional[bool] = True, max_results: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> ItemPaged["PathProperties"]: """Returns a generator to list the paths(could be files or directories) under the specified file system. The generator will lazily follow the continuation tokens returned by @@ -617,25 +618,18 @@ def get_paths( :dedent: 8 :caption: List the paths in the file system. 
""" - timeout = kwargs.pop('timeout', None) + timeout = kwargs.pop("timeout", None) begin_from = kwargs.pop("start_from", None) command = functools.partial( - self._client.file_system.list_paths, - path=path, - timeout=timeout, - begin_from=begin_from, - **kwargs + self._client.file_system.list_paths, path=path, timeout=timeout, begin_from=begin_from, **kwargs ) return ItemPaged( - command, recursive, path=path, max_results=max_results, - page_iterator_class=PathPropertiesPaged, **kwargs + command, recursive, path=path, max_results=max_results, page_iterator_class=PathPropertiesPaged, **kwargs ) @distributed_trace def create_directory( - self, directory: Union[DirectoryProperties, str], - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + self, directory: Union[DirectoryProperties, str], metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> DataLakeDirectoryClient: """ Create directory @@ -726,8 +720,7 @@ def create_directory( @distributed_trace def delete_directory( # pylint: disable=delete-operation-wrong-return-type - self, directory: Union[DirectoryProperties, str], - **kwargs: Any + self, directory: Union[DirectoryProperties, str], **kwargs: Any ) -> DataLakeDirectoryClient: """ Marks the specified path for deletion. @@ -780,10 +773,7 @@ def delete_directory( # pylint: disable=delete-operation-wrong-return-type return directory_client @distributed_trace - def create_file( - self, file: Union[FileProperties, str], - **kwargs: Any - ) -> DataLakeFileClient: + def create_file(self, file: Union[FileProperties, str], **kwargs: Any) -> DataLakeFileClient: """ Create file @@ -881,8 +871,7 @@ def create_file( @distributed_trace def delete_file( # pylint: disable=delete-operation-wrong-return-type - self, file: Union[FileProperties, str], - **kwargs: Any + self, file: Union[FileProperties, str], **kwargs: Any ) -> DataLakeFileClient: """ Marks the specified file for deletion. 
@@ -935,9 +924,7 @@ def delete_file( # pylint: disable=delete-operation-wrong-return-type return file_client def _undelete_path( - self, deleted_path_name: str, - deletion_id: str, - **kwargs: Any + self, deleted_path_name: str, deletion_id: str, **kwargs: Any ) -> Union[DataLakeDirectoryClient, DataLakeFileClient]: """Restores soft-deleted path. @@ -964,13 +951,10 @@ def _undelete_path( pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access ) path_client = AzureDataLakeStorageRESTAPI( - url, - filesystem=self.file_system_name, - path=deleted_path_name, - pipeline=pipeline + url, filesystem=self.file_system_name, path=deleted_path_name, pipeline=pipeline ) try: is_file = path_client.path.undelete(undelete_source=undelete_source, cls=is_file_path, **kwargs) @@ -986,7 +970,7 @@ def _get_root_directory_client(self) -> DataLakeDirectoryClient: :returns: A DataLakeDirectoryClient. :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient """ - return self.get_directory_client('/') + return self.get_directory_client("/") def get_directory_client(self, directory: Union[DirectoryProperties, str]) -> DataLakeDirectoryClient: """Get a client to interact with the specified directory. @@ -1010,18 +994,23 @@ def get_directory_client(self, directory: Union[DirectoryProperties, str]) -> Da :caption: Getting the directory client to interact with a specific directory. 
""" if isinstance(directory, DirectoryProperties): - directory_name = directory.get('name') + directory_name = directory.get("name") else: directory_name = str(directory) _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access + ) + return DataLakeDirectoryClient( + self.url, + self.file_system_name, + directory_name=directory_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, ) - return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, - credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, _pipeline=_pipeline, - _hosts=self._hosts) def get_file_client(self, file_path: Union[FileProperties, str]) -> DataLakeFileClient: """Get a client to interact with the specified file. 
@@ -1050,12 +1039,18 @@ def get_file_client(self, file_path: Union[FileProperties, str]) -> DataLakeFile file_path = str(file_path) _pipeline = Pipeline( transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # pylint: disable=protected-access ) return DataLakeFileClient( - self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + self.file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) @distributed_trace def list_deleted_paths(self, **kwargs: Any) -> ItemPaged[DeletedPathProperties]: @@ -1081,14 +1076,19 @@ def list_deleted_paths(self, **kwargs: Any) -> ItemPaged[DeletedPathProperties]: :rtype: ~azure.core.paging.ItemPaged[~azure.storage.filedatalake.DeletedPathProperties] """ - path_prefix = kwargs.pop('path_prefix', None) - timeout = kwargs.pop('timeout', None) - results_per_page = kwargs.pop('results_per_page', None) + path_prefix = kwargs.pop("path_prefix", None) + timeout = kwargs.pop("timeout", None) + results_per_page = kwargs.pop("results_per_page", None) command = functools.partial( self._datalake_client_for_blob_operation.file_system.list_blob_hierarchy_segment, showonly="deleted", timeout=timeout, - **kwargs) + **kwargs, + ) return ItemPaged( - command, prefix=path_prefix, page_iterator_class=DeletedPathPropertiesPaged, - results_per_page=results_per_page, **kwargs) + command, + prefix=path_prefix, + page_iterator_class=DeletedPathPropertiesPaged, + results_per_page=results_per_page, + **kwargs, + ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client_helpers.py 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client_helpers.py index e95278cf3887..702197fc99d5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client_helpers.py @@ -13,11 +13,11 @@ def _parse_url(account_url: str) -> "ParseResult": try: - if not account_url.lower().startswith('http'): + if not account_url.lower().startswith("http"): account_url = "https://" + account_url except AttributeError as exc: raise ValueError("account URL must be a string.") from exc - parsed_url = urlparse(account_url.rstrip('/')) + parsed_url = urlparse(account_url.rstrip("/")) if not parsed_url.netloc: raise ValueError(f"Invalid URL: {account_url}") return parsed_url @@ -25,16 +25,16 @@ def _parse_url(account_url: str) -> "ParseResult": def _format_url(scheme: str, hostname: str, file_system_name: Union[str, bytes], query_str: str) -> str: if isinstance(file_system_name, str): - file_system_name = file_system_name.encode('UTF-8') + file_system_name = file_system_name.encode("UTF-8") return f"{scheme}://{hostname}/{quote(file_system_name)}{query_str}" def _undelete_path_options(deleted_path_name, deletion_id, url): - quoted_path = quote(unquote(deleted_path_name.strip('/'))) - url_and_token = url.replace('.dfs.', '.blob.').split('?') + quoted_path = quote(unquote(deleted_path_name.strip("/"))) + url_and_token = url.replace(".dfs.", ".blob.").split("?") try: - url = url_and_token[0] + '/' + quoted_path + url_and_token[1] + url = url_and_token[0] + "/" + quoted_path + url_and_token[1] except IndexError: - url = url_and_token[0] + '/' + quoted_path - undelete_source = quoted_path + f'?deletionid={deletion_id}' if deletion_id else None + url = url_and_token[0] + "/" + quoted_path + undelete_source = quoted_path + f"?deletionid={deletion_id}" if deletion_id else None return quoted_path, url, undelete_source 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/__init__.py index 8a9f7149c0f5..a61eee2e3450 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,7 +12,10 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._azure_data_lake_storage_restapi import AzureDataLakeStorageRESTAPI # type: ignore +from ._client import DataLakeClient # type: ignore +from ._version import VERSION + +__version__ = VERSION try: from ._patch import __all__ as _patch_all @@ -22,7 +25,7 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AzureDataLakeStorageRESTAPI", + "DataLakeClient", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_client.py similarity index 62% rename from sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py rename to 
sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_client.py index 6304ac3e09a4..bd1d836a48f5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_client.py @@ -2,56 +2,49 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Optional +from typing import Any, TYPE_CHECKING from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse -from . import models as _models -from ._configuration import AzureDataLakeStorageRESTAPIConfiguration +from ._configuration import DataLakeClientConfiguration from ._utils.serialization import Deserializer, Serializer from .operations import FileSystemOperations, PathOperations, ServiceOperations +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version-keyword - """Azure Data Lake Storage provides storage for Hadoop and other big data workloads. + +class DataLakeClient: # pylint: disable=client-accepts-api-version-keyword + """DataLakeClient. 
:ivar service: ServiceOperations operations - :vartype service: azure.storage.filedatalake.operations.ServiceOperations + :vartype service: azure.storage.filedatalake._generated.operations.ServiceOperations :ivar file_system: FileSystemOperations operations - :vartype file_system: azure.storage.filedatalake.operations.FileSystemOperations + :vartype file_system: azure.storage.filedatalake._generated.operations.FileSystemOperations :ivar path: PathOperations operations - :vartype path: azure.storage.filedatalake.operations.PathOperations + :vartype path: azure.storage.filedatalake._generated.operations.PathOperations :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :param base_url: Service URL. Required. Default value is "". - :type base_url: str - :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies - the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or - -1 for infinite lease. Default value is None. - :type x_ms_lease_duration: int - :keyword resource: The value must be "filesystem" for all filesystem operations. Default value - is "filesystem". Note that overriding this default value may result in unsupported behavior. - :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword version: Specifies the version of the operation to use for this request. Known values + are "2026-02-06". Default value is "2026-02-06". Note that overriding this default value may + result in unsupported behavior. 
:paramtype version: str """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any - ) -> None: - self._config = AzureDataLakeStorageRESTAPIConfiguration( - url=url, x_ms_lease_duration=x_ms_lease_duration, **kwargs - ) + def __init__(self, url: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{url}" + self._config = DataLakeClientConfiguration(url=url, credential=credential, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: @@ -70,23 +63,22 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) self.file_system = FileSystemOperations(self._client, self._config, self._serialize, self._deserialize) self.path = PathOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = client._send_request(request) + >>> response = client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -99,7 +91,11 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py index 0f8ea82cd0da..671843414363 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py @@ -2,19 +2,22 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Literal, Optional +from typing import Any, TYPE_CHECKING from azure.core.pipeline import policies -VERSION = "unknown" +from ._version import VERSION +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for AzureDataLakeStorageRESTAPI. + +class DataLakeClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DataLakeClient. Note that all parameters used to create this instance are saved as instance attributes. @@ -22,30 +25,27 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies - the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or - -1 for infinite lease. Default value is None. - :type x_ms_lease_duration: int - :keyword resource: The value must be "filesystem" for all filesystem operations. Default value - is "filesystem". Note that overriding this default value may result in unsupported behavior. - :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword version: Specifies the version of the operation to use for this request. Known values + are "2026-02-06". Default value is "2026-02-06". Note that overriding this default value may + result in unsupported behavior. 
:paramtype version: str """ - def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: - resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") + def __init__(self, url: str, credential: "TokenCredential", **kwargs: Any) -> None: + version: str = kwargs.pop("version", "2026-02-06") if url is None: raise ValueError("Parameter 'url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") self.url = url - self.x_ms_lease_duration = x_ms_lease_duration - self.resource = resource + self.credential = credential self.version = version - kwargs.setdefault("sdk_moniker", "azuredatalakestoragerestapi/{}".format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "storage-file-datalake/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) @@ -59,3 +59,7 @@ def _configure(self, **kwargs: Any) -> None: self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_patch.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_patch.py index f7dd32510333..bbae00cb39b5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_patch.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_patch.py @@ -1,14 +1,108 @@ -# 
------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List +from typing import Any, Optional, TYPE_CHECKING -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level +from azure.core import PipelineClient +from azure.core.pipeline import Pipeline, PipelineRequest +from azure.core.pipeline.policies import SansIOHTTPPolicy + +from ._client import DataLakeClient as GeneratedDataLakeClient +from ._configuration import DataLakeClientConfiguration as GeneratedDataLakeClientConfiguration + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class RangeHeaderPolicy(SansIOHTTPPolicy): + """Policy that converts the 'Range' header to 'x-ms-range'.""" + + def on_request(self, request: PipelineRequest) -> None: + range_value = request.http_request.headers.pop("Range", None) + if range_value is not None: + request.http_request.headers["x-ms-range"] = range_value + + +class DataLakeClientConfiguration(GeneratedDataLakeClientConfiguration): + """Configuration for DataLakeClient that allows optional credentials. + + Overrides the generated configuration to allow None credentials + for anonymous access. 
+ """ + + def __init__(self, url: str, credential: Optional["TokenCredential"] = None, **kwargs: Any) -> None: + if url is None: + raise ValueError("Parameter 'url' must not be None.") + version: str = kwargs.pop("version", "2026-02-06") + self.url = url + self.credential = credential + self.version = version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + from ._version import VERSION + + kwargs.setdefault("sdk_moniker", "storage-file-datalake/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + +class AzureDataLakeStorageRESTAPI(GeneratedDataLakeClient): + """Subclass of the generated DataLakeClient that allows optional credentials, + accepts a pre-built pipeline, and injects the RangeHeaderPolicy. + """ + + def __init__( + self, url: str, credential: Optional["TokenCredential"] = None, *, pipeline: Any = None, **kwargs: Any + ) -> None: + from azure.core.pipeline import policies + + from ._utils.serialization import Deserializer, Serializer + from .operations import FileSystemOperations, PathOperations, ServiceOperations + + _endpoint = "{url}" + self._config = DataLakeClientConfiguration(url=url, credential=credential, **kwargs) + + if pipeline is not None: + _wrapped_pipeline = Pipeline( + transport=pipeline._transport, + policies=[RangeHeaderPolicy()] + list(pipeline._impl_policies), + ) + self._client = PipelineClient(base_url=_endpoint, pipeline=_wrapped_pipeline) + else: + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + RangeHeaderPolicy(), + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + 
policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_system = FileSystemOperations(self._client, self._config, self._serialize, self._deserialize) + self.path = PathOperations(self._client, self._config, self._serialize, self._deserialize) + + +__all__: list[str] = ["AzureDataLakeStorageRESTAPI"] def patch_sdk(): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/__init__.py index 0af9b28f6607..8026245c2abc 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/__init__.py @@ -1,6 +1,6 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/model_base.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/model_base.py new file mode 100644 index 000000000000..7b7f8ba67b53 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/model_base.py @@ -0,0 +1,1350 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") +_NONE_TYPE = type(None) + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object 
into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+ iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", 
+ "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj # type: ignore[no-any-return] + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. 
+ :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) # type: ignore[no-any-return] + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + 
timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, 
"_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ 
+ return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... 
# pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, 
"_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if 
prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = [] + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + 
return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? 
+ if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() == "dict":
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value.text if isinstance(value, ET.Element) else value) + except ValueError: + # for unknown value, return raw value + return value.text if isinstance(value, ET.Element) else value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + 
+def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +# pylint: disable=too-many-instance-attributes +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._is_optional = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result + + 
@property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized + + def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], 
typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. 
+ :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + 
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: 
typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py index 6da830e0cf4a..81ec1de5922b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/serialization.py @@ -3,7 +3,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/utils.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/utils.py new file mode 100644 index 000000000000..cbaa624660e4 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_utils/utils.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Optional + +from azure.core import MatchConditions + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_version.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/__init__.py index 8a9f7149c0f5..2ce417833389 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,7 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._azure_data_lake_storage_restapi import AzureDataLakeStorageRESTAPI # type: ignore +from ._client import DataLakeClient # type: ignore try: from ._patch import __all__ as _patch_all @@ -22,7 +22,7 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AzureDataLakeStorageRESTAPI", + "DataLakeClient", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_client.py similarity index 64% rename from sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py rename to sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_client.py index 60f2078853b1..8ffb7c63c19d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_client.py @@ -2,56 +2,49 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable, Optional +from typing import Any, Awaitable, TYPE_CHECKING from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest -from .. import models as _models from .._utils.serialization import Deserializer, Serializer -from ._configuration import AzureDataLakeStorageRESTAPIConfiguration +from ._configuration import DataLakeClientConfiguration from .operations import FileSystemOperations, PathOperations, ServiceOperations +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version-keyword - """Azure Data Lake Storage provides storage for Hadoop and other big data workloads. + +class DataLakeClient: # pylint: disable=client-accepts-api-version-keyword + """DataLakeClient. :ivar service: ServiceOperations operations - :vartype service: azure.storage.filedatalake.aio.operations.ServiceOperations + :vartype service: azure.storage.filedatalake._generated.aio.operations.ServiceOperations :ivar file_system: FileSystemOperations operations - :vartype file_system: azure.storage.filedatalake.aio.operations.FileSystemOperations + :vartype file_system: azure.storage.filedatalake._generated.aio.operations.FileSystemOperations :ivar path: PathOperations operations - :vartype path: azure.storage.filedatalake.aio.operations.PathOperations + :vartype path: azure.storage.filedatalake._generated.aio.operations.PathOperations :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :param base_url: Service URL. Required. Default value is "". 
- :type base_url: str - :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies - the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or - -1 for infinite lease. Default value is None. - :type x_ms_lease_duration: int - :keyword resource: The value must be "filesystem" for all filesystem operations. Default value - is "filesystem". Note that overriding this default value may result in unsupported behavior. - :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword version: Specifies the version of the operation to use for this request. Known values + are "2026-02-06". Default value is "2026-02-06". Note that overriding this default value may + result in unsupported behavior. 
:paramtype version: str """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any - ) -> None: - self._config = AzureDataLakeStorageRESTAPIConfiguration( - url=url, x_ms_lease_duration=x_ms_lease_duration, **kwargs - ) + def __init__(self, url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{url}" + self._config = DataLakeClientConfiguration(url=url, credential=credential, **kwargs) _policies = kwargs.pop("policies", None) if _policies is None: @@ -70,17 +63,16 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs) + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) self.file_system = FileSystemOperations(self._client, self._config, self._serialize, self._deserialize) self.path = PathOperations(self._client, self._config, self._serialize, self._deserialize) - def _send_request( + def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. 
@@ -88,7 +80,7 @@ def _send_request( >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") - >>> response = await client._send_request(request) + >>> response = await client.send_request(request) For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request @@ -101,7 +93,11 @@ def _send_request( """ request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py index 95fdeeffc2bc..4b15e0beed7c 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py @@ -2,19 +2,22 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Literal, Optional +from typing import Any, TYPE_CHECKING from azure.core.pipeline import policies -VERSION = "unknown" +from .._version import VERSION +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for AzureDataLakeStorageRESTAPI. + +class DataLakeClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for DataLakeClient. Note that all parameters used to create this instance are saved as instance attributes. @@ -22,30 +25,27 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str - :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies - the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or - -1 for infinite lease. Default value is None. - :type x_ms_lease_duration: int - :keyword resource: The value must be "filesystem" for all filesystem operations. Default value - is "filesystem". Note that overriding this default value may result in unsupported behavior. - :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword version: Specifies the version of the operation to use for this request. Known values + are "2026-02-06". Default value is "2026-02-06". Note that overriding this default value may + result in unsupported behavior. 
:paramtype version: str """ - def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: - resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") + def __init__(self, url: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + version: str = kwargs.pop("version", "2026-02-06") if url is None: raise ValueError("Parameter 'url' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") self.url = url - self.x_ms_lease_duration = x_ms_lease_duration - self.resource = resource + self.credential = credential self.version = version - kwargs.setdefault("sdk_moniker", "azuredatalakestoragerestapi/{}".format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://storage.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "storage-file-datalake/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) @@ -59,3 +59,7 @@ def _configure(self, **kwargs: Any) -> None: self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_patch.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_patch.py index f7dd32510333..1842dcca83c8 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_patch.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_patch.py @@ 
-1,14 +1,76 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List +from typing import Any, Optional, TYPE_CHECKING -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level +from azure.core import AsyncPipelineClient +from azure.core.pipeline import AsyncPipeline + +from ._client import DataLakeClient as GeneratedDataLakeClient +from .._patch import DataLakeClientConfiguration, RangeHeaderPolicy + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class AzureDataLakeStorageRESTAPI(GeneratedDataLakeClient): + """Async subclass of the generated DataLakeClient that allows optional credentials, + accepts a pre-built pipeline, and injects the RangeHeaderPolicy. 
+ """ + + def __init__( + self, url: str, credential: Optional["AsyncTokenCredential"] = None, *, pipeline: Any = None, **kwargs: Any + ) -> None: + from azure.core.pipeline import policies + + from .._utils.serialization import Deserializer, Serializer + from .operations import FileSystemOperations, PathOperations, ServiceOperations + + _endpoint = "{url}" + self._config = DataLakeClientConfiguration(url=url, credential=credential, **kwargs) + + if pipeline is not None: + _wrapped_pipeline = AsyncPipeline( + transport=pipeline._transport, + policies=[RangeHeaderPolicy()] + list(pipeline._impl_policies), + ) + self._client = AsyncPipelineClient(base_url=_endpoint, pipeline=_wrapped_pipeline) + else: + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + RangeHeaderPolicy(), + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_system = FileSystemOperations(self._client, self._config, self._serialize, self._deserialize) + self.path = PathOperations(self._client, self._config, self._serialize, self._deserialize) + + +__all__: list[str] = ["AzureDataLakeStorageRESTAPI"] def patch_sdk(): diff --git 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/__init__.py index 56a7ece347ab..5c80f0bb8157 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,9 +12,9 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._service_operations import ServiceOperations # type: ignore -from ._file_system_operations import FileSystemOperations # type: ignore -from ._path_operations import PathOperations # type: ignore +from ._operations import ServiceOperations # type: ignore +from ._operations import FileSystemOperations # type: ignore +from ._operations import PathOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py deleted file mode 100644 index 3dc6fe583f50..000000000000 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py +++ /dev/null @@ -1,647 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Literal, Optional, TypeVar, Union - -from azure.core import AsyncPipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ... import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._file_system_operations import ( - build_create_request, - build_delete_request, - build_get_properties_request, - build_list_blob_hierarchy_segment_request, - build_list_paths_request, - build_set_properties_request, -) -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] - - -class FileSystemOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.storage.filedatalake.aio.AzureDataLakeStorageRESTAPI`'s - :attr:`file_system` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AzureDataLakeStorageRESTAPIConfiguration = ( - input_args.pop(0) if input_args else kwargs.pop("config") - ) - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def create( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - properties: Optional[str] = None, - **kwargs: Any - ) -> None: - """Create FileSystem. - - Create a FileSystem rooted at the specified location. If the FileSystem already exists, the - operation fails. This operation does not support conditional HTTP requests. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. 
To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. Default value is - None. - :type properties: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_create_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - properties=properties, - resource=self._config.resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) - response_headers["x-ms-namespace-enabled"] = self._deserialize( - "str", response.headers.get("x-ms-namespace-enabled") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_properties( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - properties: Optional[str] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Set FileSystem Properties. - - Set properties for the FileSystem. This operation supports conditional HTTP requests. For - more information, see `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. Default value is - None. - :type properties: str - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_properties_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - properties=properties, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - resource=self._config.resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def get_properties( - self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any - ) -> None: - """Get FileSystem Properties. - - All system and user-defined filesystem properties are specified in the response headers. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. 
- :type timeout: int - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_get_properties_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - resource=self._config.resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-namespace-enabled"] = self._deserialize( - "str", 
response.headers.get("x-ms-namespace-enabled") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def delete( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Delete FileSystem. - - Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same - identifier cannot be created for at least 30 seconds. While the filesystem is being deleted, - attempts to create a filesystem with the same identifier will fail with status code 409 - (Conflict), with the service returning additional error information indicating that the - filesystem is being deleted. All other operations, including operations on any files or - directories within the filesystem, will fail with status code 404 (Not Found) while the - filesystem is being deleted. This operation supports conditional HTTP requests. For more - information, see `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _if_modified_since = None - _if_unmodified_since = None - if modified_access_conditions is not None: - _if_modified_since = modified_access_conditions.if_modified_since - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - resource=self._config.resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", 
response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def list_paths( - self, - recursive: bool, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - continuation: Optional[str] = None, - path: Optional[str] = None, - max_results: Optional[int] = None, - upn: Optional[bool] = None, - begin_from: Optional[str] = None, - **kwargs: Any - ) -> _models.PathList: - """List Paths. - - List FileSystem paths and their properties. - - :param recursive: Required. Required. - :type recursive: bool - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param path: Optional. Filters results to paths within the specified directory. An error - occurs if the directory does not exist. Default value is None. - :type path: str - :param max_results: An optional value that specifies the maximum number of items to return. If - omitted or greater than 5,000, the response will include up to 5,000 items. Default value is - None. - :type max_results: int - :param upn: Optional. 
Valid only when Hierarchical Namespace is enabled for the account. If - "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. Note that group and application Object IDs are not translated because they do not have - unique friendly names. Default value is None. - :type upn: bool - :param begin_from: Optional. A relative path within the specified directory where the listing - will start from. For example, a recursive listing under directory folder1/folder2 with - beginFrom as folder3/readmefile.txt will start listing from - folder1/folder2/folder3/readmefile.txt. Please note that, multiple entity levels are supported - for recursive listing. Non-recursive listing supports only one entity level. An error will - appear if multiple entity levels are specified for non-recursive listing. Default value is - None. 
- :type begin_from: str - :return: PathList or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.PathList - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.PathList] = kwargs.pop("cls", None) - - _request = build_list_paths_request( - url=self._config.url, - recursive=recursive, - request_id_parameter=request_id_parameter, - timeout=timeout, - continuation=continuation, - path=path, - max_results=max_results, - upn=upn, - begin_from=begin_from, - resource=self._config.resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - 
response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - - deserialized = self._deserialize("PathList", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def list_blob_hierarchy_segment( - self, - prefix: Optional[str] = None, - delimiter: Optional[str] = None, - marker: Optional[str] = None, - max_results: Optional[int] = None, - include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None, - showonly: Literal["deleted"] = "deleted", - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.ListBlobsHierarchySegmentResponse: - """The List Blobs operation returns a list of the blobs under the specified container. - - :param prefix: Filters results to filesystems within the specified prefix. Default value is - None. - :type prefix: str - :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix - element in the response body that acts as a placeholder for all blobs whose names begin with - the same substring up to the appearance of the delimiter character. The delimiter may be a - single character or a string. Default value is None. - :type delimiter: str - :param marker: A string value that identifies the portion of the list of containers to be - returned with the next listing operation. The operation returns the NextMarker value within the - response body if the listing operation did not return all containers remaining to be listed - with the current page. The NextMarker value can be used as the value for the marker parameter - in a subsequent call to request the next page of list items. The marker value is opaque to the - client. Default value is None. - :type marker: str - :param max_results: An optional value that specifies the maximum number of items to return. 
If - omitted or greater than 5,000, the response will include up to 5,000 items. Default value is - None. - :type max_results: int - :param include: Include this parameter to specify one or more datasets to include in the - response. Default value is None. - :type include: list[str or ~azure.storage.filedatalake.models.ListBlobsIncludeItem] - :param showonly: Include this parameter to specify one or more datasets to include in the - response. Known values are "deleted" and None. Default value is "deleted". - :type showonly: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :return: ListBlobsHierarchySegmentResponse or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.ListBlobsHierarchySegmentResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) - - _request = build_list_blob_hierarchy_segment_request( - url=self._config.url, - prefix=prefix, - delimiter=delimiter, - marker=marker, - max_results=max_results, - include=include, - showonly=showonly, - timeout=timeout, - 
request_id_parameter=request_id_parameter, - restype=restype, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_operations.py new file mode 100644 index 000000000000..eea8d5f08491 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_operations.py @@ -0,0 +1,2710 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# 
coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +import datetime +from typing import Any, AsyncIterator, Callable, Optional, TypeVar, Union + +from azure.core import AsyncPipelineClient, MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... 
import models as _models +from ..._utils.model_base import _deserialize, _deserialize_xml, _failsafe_deserialize +from ..._utils.serialization import Deserializer, Serializer +from ...operations._operations import ( + build_file_system_create_request, + build_file_system_delete_request, + build_file_system_get_properties_request, + build_file_system_list_blob_hierarchy_segment_request, + build_file_system_list_paths_request, + build_file_system_set_properties_request, + build_path_append_data_request, + build_path_create_request, + build_path_delete_request, + build_path_flush_data_request, + build_path_get_properties_request, + build_path_lease_request, + build_path_read_request, + build_path_set_access_control_recursive_request, + build_path_set_access_control_request, + build_path_set_expiry_request, + build_path_undelete_request, + build_path_update_request, + build_service_list_file_systems_request, +) +from .._configuration import DataLakeClientConfiguration + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] + + +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.filedatalake._generated.aio.DataLakeClient`'s + :attr:`service` attribute. 
class ServiceOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.storage.filedatalake._generated.aio.DataLakeClient`'s
    :attr:`service` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # The generated client passes (client, config, serializer, deserializer)
        # positionally; the keyword fallbacks keep this regeneration-stable.
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def list_file_systems(
        self,
        *,
        resource: Union[str, _models.AccountResourceType],
        prefix: Optional[str] = None,
        continuation: Optional[str] = None,
        max_results: Optional[int] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> _models.FileSystemList:
        """List filesystems and their properties in given account.

        :keyword resource: The value must be "account" for all account operations. "account" Required.
        :paramtype resource: str or ~azure.storage.filedatalake._generated.models.AccountResourceType
        :keyword prefix: Filters results to filesystems within the specified prefix. Default value is
         None.
        :paramtype prefix: str
        :keyword continuation: Optional. A continuation (paging) token returned in the
         x-ms-continuation response header of an earlier List FileSystems call when not all results
         were returned. Pass it to resume listing from where the previous call stopped. Default value
         is None.
        :paramtype continuation: str
        :keyword max_results: An optional value that specifies the maximum number of items to return.
         If omitted or greater than 5,000, the response will include up to 5,000 items. Default value
         is None.
        :paramtype max_results: int
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: FileSystemList. The FileSystemList is compatible with MutableMapping
        :rtype: ~azure.storage.filedatalake._generated.models.FileSystemList
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook: when set it receives
        # (pipeline_response, deserialized, response_headers) and its result is returned.
        cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None)

        _request = build_service_list_file_systems_request(
            resource=resource,
            prefix=prefix,
            continuation=continuation,
            max_results=max_results,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )
        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))

        if _stream:
            # Streaming callers get the raw byte iterator instead of a parsed model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.FileSystemList, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
class FileSystemOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.storage.filedatalake._generated.aio.DataLakeClient`'s
    :attr:`file_system` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # The generated client passes (client, config, serializer, deserializer)
        # positionally; the keyword fallbacks keep this regeneration-stable.
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def create(
        self,
        *,
        resource: Union[str, _models.FileSystemResourceType],
        properties: Optional[str] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """Create a FileSystem rooted at the specified location. If the FileSystem already exists, the
        operation fails. This operation does not support conditional HTTP requests.

        :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem"
         Required.
        :paramtype resource: str or
         ~azure.storage.filedatalake._generated.models.FileSystemResourceType
        :keyword properties: Optional. User-defined properties to be stored with the filesystem, in
         the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each
         value is a base64 encoded string. Note that the string may only contain ASCII characters in
         the ISO-8859-1 character set. Default value is None.
        :paramtype properties: str
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook; when set its result is returned instead of None.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_file_system_create_request(
            resource=resource,
            properties=properties,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 201 Created is the only success status for this operation.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-namespace-enabled"] = self._deserialize(
            "str", response.headers.get("x-ms-namespace-enabled")
        )
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def set_properties(
        self,
        *,
        resource: Union[str, _models.FileSystemResourceType],
        properties: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """Set properties for the FileSystem. This operation supports conditional HTTP requests. For
        more information, see "Specifying Conditional Headers for Blob Service Operations" in the
        Azure Storage documentation.

        :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem"
         Required.
        :paramtype resource: str or
         ~azure.storage.filedatalake._generated.models.FileSystemResourceType
        :keyword properties: Optional. User-defined properties to be stored with the filesystem, in
         the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each
         value is a base64 encoded string. Note that the string may only contain ASCII characters in
         the ISO-8859-1 character set. If the filesystem exists, any properties not included in the
         list will be removed. All properties are removed if the header is omitted. To merge new and
         existing properties, first get all existing properties and the current E-Tag, then make a
         conditional request with the E-Tag and include values for all properties. Default value is
         None.
        :paramtype properties: str
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has
         not been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook; when set its result is returned instead of None.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_file_system_set_properties_request(
            resource=resource,
            properties=properties,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def get_properties(
        self, *, resource: Union[str, _models.FileSystemResourceType], timeout: Optional[int] = None, **kwargs: Any
    ) -> bool:
        """All system and user-defined filesystem properties are specified in the response headers.

        :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem"
         Required.
        :paramtype resource: str or
         ~azure.storage.filedatalake._generated.models.FileSystemResourceType
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: bool. True when the service responds with a success status code; the filesystem
         properties themselves are carried in the response headers (accessible via the ``cls`` hook).
        :rtype: bool
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook; when set it receives the parsed response headers.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_file_system_get_properties_request(
            resource=resource,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties"))
        response_headers["x-ms-namespace-enabled"] = self._deserialize(
            "str", response.headers.get("x-ms-namespace-enabled")
        )
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
        return 200 <= response.status_code <= 299
    @distributed_trace_async
    async def delete(
        self,
        *,
        resource: Union[str, _models.FileSystemResourceType],
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same
        identifier cannot be created for at least 30 seconds. While the filesystem is being deleted,
        attempts to create a filesystem with the same identifier will fail with status code 409
        (Conflict), with the service returning additional error information indicating that the
        filesystem is being deleted. All other operations, including operations on any files or
        directories within the filesystem, will fail with status code 404 (Not Found) while the
        filesystem is being deleted. This operation supports conditional HTTP requests. For more
        information, see "Specifying Conditional Headers for Blob Service Operations" in the Azure
        Storage documentation.

        :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem"
         Required.
        :paramtype resource: str or
         ~azure.storage.filedatalake._generated.models.FileSystemResourceType
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has
         not been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook; when set its result is returned instead of None.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_file_system_delete_request(
            resource=resource,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 202 Accepted: deletion is asynchronous on the service side.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def list_paths(
        self,
        *,
        recursive: bool,
        continuation: Optional[str] = None,
        path: Optional[str] = None,
        max_results: Optional[int] = None,
        upn: Optional[bool] = None,
        begin_from: Optional[str] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> _models.PathList:
        """List FileSystem paths and their properties.

        :keyword recursive: If true, all paths beneath the specified directory are listed; if false,
         only the immediate children are listed. Required.
        :paramtype recursive: bool
        :keyword continuation: Optional. A continuation (paging) token returned in the
         x-ms-continuation response header of an earlier List Paths call when not all results were
         returned. Pass it to resume listing from where the previous call stopped. Default value is
         None.
        :paramtype continuation: str
        :keyword path: Optional. Filters results to paths within the specified directory. An error
         occurs if the directory does not exist. Default value is None.
        :paramtype path: str
        :keyword max_results: An optional value that specifies the maximum number of items to return.
         If omitted or greater than 5,000, the response will include up to 5,000 items. Default value
         is None.
        :paramtype max_results: int
        :keyword upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If
         "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl
         response headers will be transformed from Azure Active Directory Object IDs to User Principal
         Names. If "false", the values will be returned as Azure Active Directory Object IDs. The
         default value is false. Note that group and application Object IDs are not translated because
         they do not have unique friendly names. Default value is None.
        :paramtype upn: bool
        :keyword begin_from: Optional. A relative path within the specified directory where the
         listing will start from. Default value is None.
        :paramtype begin_from: str
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: PathList. The PathList is compatible with MutableMapping
        :rtype: ~azure.storage.filedatalake._generated.models.PathList
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook: when set it receives
        # (pipeline_response, deserialized, response_headers) and its result is returned.
        cls: ClsType[_models.PathList] = kwargs.pop("cls", None)

        _request = build_file_system_list_paths_request(
            recursive=recursive,
            continuation=continuation,
            path=path,
            max_results=max_results,
            upn=upn,
            begin_from=begin_from,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )
        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))

        if _stream:
            # Streaming callers get the raw byte iterator instead of a parsed model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.PathList, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def list_blob_hierarchy_segment(
        self,
        *,
        prefix: Optional[str] = None,
        delimiter: Optional[str] = None,
        marker: Optional[str] = None,
        max_results: Optional[int] = None,
        include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None,
        showonly: Optional[Union[str, _models.ListBlobsShowOnly]] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> _models.ListBlobsHierarchySegmentResponse:
        """The List Blobs operation returns a list of the blobs under the specified container.

        :keyword prefix: Filters results to blobs whose names begin with the specified prefix. Default
         value is None.
        :paramtype prefix: str
        :keyword delimiter: When the request includes this parameter, the operation returns a
         BlobPrefix element in the response body that acts as a placeholder for all blobs whose names
         begin with the same substring up to the appearance of the delimiter character. The delimiter
         may be a single character or a string. Default value is None.
        :paramtype delimiter: str
        :keyword marker: A string value that identifies the portion of the list of containers to be
         returned with the next listing operation. The operation returns the NextMarker value within
         the response body if the listing operation did not return all containers remaining to be
         listed with the current page. The NextMarker value can be used as the value for the marker
         parameter in a subsequent call to request the next page of list items. The marker value is
         opaque to the client. Default value is None.
        :paramtype marker: str
        :keyword max_results: An optional value that specifies the maximum number of items to return.
         If omitted or greater than 5,000, the response will include up to 5,000 items. Default value
         is None.
        :paramtype max_results: int
        :keyword include: Include this parameter to specify one or more datasets to include in the
         response. Default value is None.
        :paramtype include: list[str or
         ~azure.storage.filedatalake._generated.models.ListBlobsIncludeItem]
        :keyword showonly: Optional. Restricts the listing to a particular class of blobs; the only
         known value is "deleted" (list only soft-deleted blobs). "deleted" Default value is None.
        :paramtype showonly: str or ~azure.storage.filedatalake._generated.models.ListBlobsShowOnly
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: ListBlobsHierarchySegmentResponse. The ListBlobsHierarchySegmentResponse is
         compatible with MutableMapping
        :rtype: ~azure.storage.filedatalake._generated.models.ListBlobsHierarchySegmentResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Callers can override the status-code -> exception mapping via the error_map kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional custom-response hook: when set it receives
        # (pipeline_response, deserialized, response_headers) and its result is returned.
        cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None)

        _request = build_file_system_list_blob_hierarchy_segment_request(
            prefix=prefix,
            delimiter=delimiter,
            marker=marker,
            max_results=max_results,
            include=include,
            showonly=showonly,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )
        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))

        if _stream:
            # Streaming callers get the raw byte iterator instead of a parsed model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            # This Blob-endpoint operation returns XML, not JSON, hence _deserialize_xml.
            deserialized = _deserialize_xml(_models.ListBlobsHierarchySegmentResponse, response.text())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
class PathOperations:
    """Async operations on a single path (file or directory) in a Data Lake filesystem.

    .. warning::
        **DO NOT** instantiate this class directly.

    Instead, you should access the following operations through
    :class:`~azure.storage.filedatalake._generated.aio.DataLakeClient`'s
    :attr:`path` attribute.
    """

    def __init__(self, *args, **kwargs) -> None:
        # The generated client passes the pipeline plumbing positionally; the
        # keyword fallbacks allow explicit construction (e.g. in tests).
        input_args = list(args)
        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def create( # pylint: disable=too-many-locals
        self,
        *,
        resource: Optional[Union[str, _models.PathResourceType]] = None,
        mode: Optional[Union[str, _models.PathRenameMode]] = None,
        continuation: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        content_disposition: Optional[str] = None,
        content_type: Optional[str] = None,
        rename_source: Optional[str] = None,
        lease_id: Optional[str] = None,
        source_lease_id: Optional[str] = None,
        properties: Optional[str] = None,
        permissions: Optional[str] = None,
        umask: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        source_if_match: Optional[str] = None,
        source_if_none_match: Optional[str] = None,
        source_if_modified_since: Optional[datetime.datetime] = None,
        source_if_unmodified_since: Optional[datetime.datetime] = None,
        encryption_key: Optional[str] = None,
        encryption_key_sha256: Optional[str] = None,
        encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
        owner: Optional[str] = None,
        group: Optional[str] = None,
        acl: Optional[str] = None,
        proposed_lease_id: Optional[str] = None,
        lease_duration: Optional[int] = None,
        expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None,
        expires_on: Optional[str] = None,
        encryption_context: Optional[str] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Create or rename a file or directory.

        By default the destination is overwritten and, if the destination already
        exists and has a lease, the lease is broken. This operation supports
        conditional HTTP requests; to fail if the destination already exists, use
        a conditional request with ``If-None-Match: "*"``. See `Specifying
        Conditional Headers for Blob Service Operations
        <https://learn.microsoft.com/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

        :keyword resource: Required only for Create File and Create Directory. Known
            values are: "directory" and "file". Default value is None.
        :paramtype resource: str or ~azure.storage.filedatalake._generated.models.PathResourceType
        :keyword mode: Valid only when namespace is enabled; determines rename
            behavior. Known values are: "legacy" and "posix" (service default
            "posix"). Default value is None.
        :paramtype mode: str or ~azure.storage.filedatalake._generated.models.PathRenameMode
        :keyword continuation: Continuation token returned when a rename/delete is
            split across multiple invocations; pass it back to continue. Default
            value is None.
        :paramtype continuation: str
        :keyword cache_control: Optional. Sets the blob's cache control, stored with
            the blob and returned on read. Default value is None.
        :paramtype cache_control: str
        :keyword content_encoding: Optional. Sets the blob's content encoding,
            stored with the blob and returned on read. Default value is None.
        :paramtype content_encoding: str
        :keyword content_language: Optional. Sets the blob's content language,
            stored with the blob and returned on read. Default value is None.
        :paramtype content_language: str
        :keyword content_disposition: Optional. Sets the blob's Content-Disposition
            header. Default value is None.
        :paramtype content_disposition: str
        :keyword content_type: Optional. Sets the blob's content type, stored with
            the blob and returned on read. Default value is None.
        :paramtype content_type: str
        :keyword rename_source: Optional file or directory to rename, in the form
            "/{filesystem}/{path}". Must be a URL percent-encoded string containing
            only ISO-8859-1 ASCII characters. Default value is None.
        :paramtype rename_source: str
        :keyword lease_id: If specified, the operation only succeeds if the
            resource's lease is active and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword source_lease_id: Lease ID for the source path; the source must
            hold a matching active lease. Default value is None.
        :paramtype source_lease_id: str
        :keyword properties: User-defined properties as a comma-separated list of
            "name=value" pairs where each value is base64 encoded. Omitting the
            header removes all properties. Default value is None.
        :paramtype properties: str
        :keyword permissions: POSIX access permissions (symbolic "rwxrw-rw-" or
            4-digit octal, e.g. 0766). Valid only on HNS-enabled accounts. Default
            value is None.
        :paramtype permissions: str
        :keyword umask: Restricts permissions of the created path when the parent
            has no default ACL; resulting permission is ``p & ~u``. 4-digit octal;
            service default 0027. Valid only on HNS-enabled accounts. Default value
            is None.
        :paramtype umask: str
        :keyword if_modified_since: Operate only if the blob has been modified
            since the given date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Operate only if the blob has not been
            modified since the given date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword source_if_match: ETag that the rename source must match. Default
            value is None.
        :paramtype source_if_match: str
        :keyword source_if_none_match: ETag that the rename source must not match.
            Default value is None.
        :paramtype source_if_none_match: str
        :keyword source_if_modified_since: Operate only if the source has been
            modified since the given date/time. Default value is None.
        :paramtype source_if_modified_since: ~datetime.datetime
        :keyword source_if_unmodified_since: Operate only if the source has not
            been modified since the given date/time. Default value is None.
        :paramtype source_if_unmodified_since: ~datetime.datetime
        :keyword encryption_key: Customer-provided key used to encrypt the request
            data; if omitted, the account key is used. Default value is None.
        :paramtype encryption_key: str
        :keyword encryption_key_sha256: SHA-256 hash of the provided encryption
            key; required alongside ``encryption_key``. Default value is None.
        :paramtype encryption_key_sha256: str
        :keyword encryption_algorithm: Algorithm for the encryption key hash; only
            "AES256" is accepted. Required alongside ``encryption_key``. Default
            value is None.
        :paramtype encryption_algorithm: str or
            ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType
        :keyword owner: Optional. The owner of the blob or directory. Default value
            is None.
        :paramtype owner: str
        :keyword group: Optional. The owning group of the blob or directory.
            Default value is None.
        :paramtype group: str
        :keyword acl: POSIX access control list as comma-separated ACEs of the
            form "[scope:][type]:[id]:[permissions]". Default value is None.
        :paramtype acl: str
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format.
            Default value is None.
        :paramtype proposed_lease_id: str
        :keyword lease_duration: Lease duration in seconds (15-60, or -1 for
            infinite). Default value is None.
        :paramtype lease_duration: int
        :keyword expiry_options: Mode of the expiry time. Known values are:
            "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute".
            Default value is None.
        :paramtype expiry_options: str or
            ~azure.storage.filedatalake._generated.models.PathExpiryOptions
        :keyword expires_on: The time to set the blob to expiry. Default value is
            None.
        :paramtype expires_on: str
        :keyword encryption_context: Encryption context to set on the file. Default
            value is None.
        :paramtype encryption_context: str
        :keyword timeout: Server-side timeout in seconds. See `Setting Timeouts for
            Blob Service Operations
            <https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations>`_.
            Default value is None.
        :paramtype timeout: int
        :keyword etag: Check if resource is changed. Set None to skip checking
            etag. Default value is None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default
            value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP failures to azure-core exception types; the 412 mapping depends
        # on which etag match condition the caller requested.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_create_request(
            resource=resource,
            mode=mode,
            continuation=continuation,
            cache_control=cache_control,
            content_encoding=content_encoding,
            content_language=content_language,
            content_disposition=content_disposition,
            content_type=content_type,
            rename_source=rename_source,
            lease_id=lease_id,
            source_lease_id=source_lease_id,
            properties=properties,
            permissions=permissions,
            umask=umask,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            source_if_match=source_if_match,
            source_if_none_match=source_if_none_match,
            source_if_modified_since=source_if_modified_since,
            source_if_unmodified_since=source_if_unmodified_since,
            encryption_key=encryption_key,
            encryption_key_sha256=encryption_key_sha256,
            encryption_algorithm=encryption_algorithm,
            owner=owner,
            group=group,
            acl=acl,
            proposed_lease_id=proposed_lease_id,
            lease_duration=lease_duration,
            expiry_options=expiry_options,
            expires_on=expires_on,
            encryption_context=encryption_context,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The account URL is substituted verbatim (skip_quote) into the template.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Only 201 Created is a success for this operation.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
            "bool", response.headers.get("x-ms-request-server-encrypted")
        )
        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-key-sha256")
        )
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def update(  # pylint: disable=too-many-locals
        self,
        body: bytes,
        *,
        action: Union[str, _models.PathUpdateAction],
        max_records: Optional[int] = None,
        continuation: Optional[str] = None,
        mode: Optional[Union[str, _models.PathSetAccessControlRecursiveMode]] = None,
        force_flag: Optional[bool] = None,
        position: Optional[int] = None,
        retain_uncommitted_data: Optional[bool] = None,
        close: Optional[bool] = None,
        content_length: Optional[int] = None,
        content_md5: Optional[bytes] = None,
        lease_id: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_disposition: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        properties: Optional[str] = None,
        owner: Optional[str] = None,
        group: Optional[str] = None,
        permissions: Optional[str] = None,
        acl: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        structured_body_type: Optional[str] = None,
        structured_content_length: Optional[int] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> _models.SetAccessControlRecursiveResponse:
        """Append data, flush data, set properties, or set access control on a path.

        Data can only be appended to a file; concurrent writes to the same file
        using multiple clients are not supported. This operation supports
        conditional HTTP requests. See `Specifying Conditional Headers for Blob
        Service Operations
        <https://learn.microsoft.com/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

        :param body: Initial data. Required.
        :type body: bytes
        :keyword action: One of "append", "flush", "setProperties",
            "setAccessControl", or "setAccessControlRecursive". Access control
            actions require Hierarchical Namespace; the x-ms-permissions and
            x-ms-acl request headers are mutually exclusive. Required.
        :paramtype action: str or ~azure.storage.filedatalake._generated.models.PathUpdateAction
        :keyword max_records: For "setAccessControlRecursive": maximum number of
            files/directories to process per invocation (capped at 2,000). Default
            value is None.
        :paramtype max_records: int
        :keyword continuation: Continuation token from a previous
            setAccessControlRecursive response (percent-encoded). Default value is
            None.
        :paramtype continuation: str
        :keyword mode: Mode for set access control recursive. Known values are:
            "set", "modify", and "remove". Default value is None.
        :paramtype mode: str or
            ~azure.storage.filedatalake._generated.models.PathSetAccessControlRecursiveMode
        :keyword force_flag: For "setAccessControlRecursive": if true, ignore user
            errors (4XX) and continue with other sub-entities; if false (default),
            terminate quickly on user errors. Default value is None.
        :paramtype force_flag: bool
        :keyword position: Byte offset at which data is appended / up to which data
            is flushed. Required for append and flush. Default value is None.
        :paramtype position: int
        :keyword retain_uncommitted_data: Flush only: if true, uncommitted data
            past the flush position is retained for a future flush; otherwise it is
            deleted. Default is false. Default value is None.
        :paramtype retain_uncommitted_data: bool
        :keyword close: Flush only, with change notifications enabled: if true, the
            raised file-change event marks this as the final update (stream
            closed). Default is false. Default value is None.
        :paramtype close: bool
        :keyword content_length: Required for "Append Data" (request content length
            in bytes) and "Flush Data" (must be 0). Default value is None.
        :paramtype content_length: int
        :keyword content_md5: Transactional MD5 of the body, validated by the
            service. Default value is None.
        :paramtype content_md5: bytes
        :keyword lease_id: If specified, the operation only succeeds if the
            resource's lease is active and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword cache_control: Optional. Sets the blob's cache control, stored
            with the blob and returned on read. Default value is None.
        :paramtype cache_control: str
        :keyword content_disposition: Optional. Sets the blob's Content-Disposition
            header. Default value is None.
        :paramtype content_disposition: str
        :keyword content_encoding: Optional. Sets the blob's content encoding,
            stored with the blob and returned on read. Default value is None.
        :paramtype content_encoding: str
        :keyword content_language: Optional. Sets the blob's content language,
            stored with the blob and returned on read. Default value is None.
        :paramtype content_language: str
        :keyword properties: User-defined properties as a comma-separated list of
            "name=value" pairs where each value is base64 encoded. Omitting the
            header removes all properties. Default value is None.
        :paramtype properties: str
        :keyword owner: Optional. The owner of the blob or directory. Default value
            is None.
        :paramtype owner: str
        :keyword group: Optional. The owning group of the blob or directory.
            Default value is None.
        :paramtype group: str
        :keyword permissions: POSIX access permissions (symbolic or 4-digit octal).
            Valid only on HNS-enabled accounts. Default value is None.
        :paramtype permissions: str
        :keyword acl: POSIX access control list as comma-separated ACEs of the form
            "[scope:][type]:[id]:[permissions]". Default value is None.
        :paramtype acl: str
        :keyword if_modified_since: Operate only if the blob has been modified
            since the given date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Operate only if the blob has not been
            modified since the given date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword structured_body_type: Required if the request body is a structured
            message; specifies the message schema version and properties. Default
            value is None.
        :paramtype structured_body_type: str
        :keyword structured_content_length: Required if the request body is a
            structured message; length of the file content inside the message body
            (always smaller than Content-Length). Default value is None.
        :paramtype structured_content_length: int
        :keyword timeout: Server-side timeout in seconds. See `Setting Timeouts for
            Blob Service Operations
            <https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations>`_.
            Default value is None.
        :paramtype timeout: int
        :keyword etag: Check if resource is changed. Set None to skip checking
            etag. Default value is None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default
            value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: SetAccessControlRecursiveResponse. The
            SetAccessControlRecursiveResponse is compatible with MutableMapping.
        :rtype: ~azure.storage.filedatalake._generated.models.SetAccessControlRecursiveResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP failures to azure-core exception types; the 412 mapping depends
        # on which etag match condition the caller requested.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # A caller-supplied Content-Type header wins over the default octet-stream.
        content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
        cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None)

        _content = body

        _request = build_path_update_request(
            action=action,
            max_records=max_records,
            continuation=continuation,
            mode=mode,
            force_flag=force_flag,
            position=position,
            retain_uncommitted_data=retain_uncommitted_data,
            close=close,
            content_length=content_length,
            content_md5=content_md5,
            lease_id=lease_id,
            cache_control=cache_control,
            content_disposition=content_disposition,
            content_encoding=content_encoding,
            content_language=content_language,
            properties=properties,
            owner=owner,
            group=group,
            permissions=permissions,
            acl=acl,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            structured_body_type=structured_body_type,
            structured_content_length=structured_content_length,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            version=self._config.version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # The account URL is substituted verbatim (skip_quote) into the template.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                # Drain the streamed body so the socket can be released before
                # raising; the stream may already be consumed or closed.
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges"))
        response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
        response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
        response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
        response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range"))
        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
        response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties"))
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )
        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))

        if _stream:
            # Streaming callers get raw (optionally decompressed) bytes instead of
            # a deserialized model.
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.SetAccessControlRecursiveResponse, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def lease(
        self,
        *,
        lease_action: Union[str, _models.PathLeaseAction],
        lease_duration: Optional[int] = None,
        lease_break_period: Optional[int] = None,
        lease_id: Optional[str] = None,
        proposed_lease_id: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Create and manage a lease to restrict write and delete access to the path.

        This operation supports conditional HTTP requests. See `Specifying
        Conditional Headers for Blob Service Operations
        <https://learn.microsoft.com/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

        :keyword lease_action: One of five actions: "acquire" (with
            x-ms-proposed-lease-id and x-ms-lease-duration) to acquire a new lease;
            "break" to break an existing lease (the response indicates the interval
            in seconds until a new lease can be acquired); "change" (with
            x-ms-lease-id and x-ms-proposed-lease-id) to change the lease ID;
            "renew" (with x-ms-lease-id) to renew; "release" (with x-ms-lease-id)
            to release. Required.
        :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.PathLeaseAction
        :keyword lease_duration: Lease duration in seconds (15-60, or -1 for
            infinite); required to acquire. Default value is None.
        :paramtype lease_duration: int
        :keyword lease_break_period: Break period in seconds (0-60); optional when
            breaking a lease. Default value is None.
        :paramtype lease_break_period: int
        :keyword lease_id: If specified, the operation only succeeds if the
            resource's lease is active and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format.
            Default value is None.
        :paramtype proposed_lease_id: str
        :keyword if_modified_since: Operate only if the blob has been modified
            since the given date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Operate only if the blob has not been
            modified since the given date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword timeout: Server-side timeout in seconds. See `Setting Timeouts for
            Blob Service Operations
            <https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations>`_.
            Default value is None.
        :paramtype timeout: int
        :keyword etag: Check if resource is changed. Set None to skip checking
            etag. Default value is None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default
            value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP failures to azure-core exception types; the 412 mapping depends
        # on which etag match condition the caller requested.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_lease_request(
            lease_action=lease_action,
            lease_duration=lease_duration,
            lease_break_period=lease_break_period,
            lease_id=lease_id,
            proposed_lease_id=proposed_lease_id,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The account URL is substituted verbatim (skip_quote) into the template.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
        response_headers["x-ms-lease-time"] = self._deserialize("str", response.headers.get("x-ms-lease-time"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + """Read the contents of a file. For read operations, range requests are supported. This operation + supports conditional HTTP requests. For more information, see `Specifying Conditional Headers + for Blob Service Operations + `_. + + :keyword range: The HTTP Range request header specifies one or more byte ranges of the resource + to be retrieved. Default value is None. + :paramtype range: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword range_get_content_md5: Optional. When this header is set to "true" and specified + together with the Range header, the service returns the MD5 hash for the range, as long as the + range is less than or equal to 4MB in size. Default value is None. + :paramtype range_get_content_md5: bool + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. 
+ Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. "AES256" Default value is None. + :paramtype encryption_algorithm: str or + ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_path_read_request( + range=range, + lease_id=lease_id, + range_get_content_md5=range_get_content_md5, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments 
= { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-resource-type"] = 
self._deserialize("str", response.headers.get("x-ms-resource-type")) + response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_properties( + self, + *, + action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, + upn: Optional[bool] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Get 
Properties returns all system and user defined properties for a path. Get Status returns + all system defined properties for a path. Get Access Control List returns the access control + list for a path. This operation supports conditional HTTP requests. For more information, see + `Specifying Conditional Headers for Blob Service Operations + `_. + + :keyword action: Optional. If the value is "getStatus" only the system defined properties for + the path are returned. If the value is "getAccessControl" the access control list is returned + in the response headers (Hierarchical Namespace must be enabled for the account), otherwise the + properties are returned. Known values are: "getAccessControl" and "getStatus". Default value is + None. + :paramtype action: str or ~azure.storage.filedatalake._generated.models.PathGetPropertiesAction + :keyword upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If + "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response + headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If + "false", the values will be returned as Azure Active Directory Object IDs. The default value is + false. Note that group and application Object IDs are not translated because they do not have + unique friendly names. Default value is None. + :paramtype upn: bool + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_get_properties_request( + action=action, + upn=upn, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) + response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) + response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner")) + response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group")) + response_headers["x-ms-permissions"] = self._deserialize("str", response.headers.get("x-ms-permissions")) + response_headers["x-ms-acl"] = self._deserialize("str", 
response.headers.get("x-ms-acl")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-context"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-context") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def delete( + self, + *, + recursive: Optional[bool] = None, + continuation: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + paginated: Optional[bool] = None, + timeout: 
Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Delete the file or directory. This operation supports conditional HTTP requests. For more + information, see `Specifying Conditional Headers for Blob Service Operations + `_. + + :keyword recursive: Required. Default value is None. + :paramtype recursive: bool + :keyword continuation: Optional. When deleting a directory, the number of paths that are + deleted with each invocation is limited. If the number of paths to be deleted exceeds this + limit, a continuation token is returned in this response header. When a continuation token is + returned in the response, it must be specified in a subsequent invocation of the delete + operation to continue deleting the directory. Default value is None. + :paramtype continuation: str + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword paginated: If true, paginated behavior will be seen. Pagination is for the recursive + ACL checks as a POSIX requirement in the server and Delete in an atomic operation once the ACL + checks are completed. If false or missing, normal default behavior will kick in, which may + timeout in case of very large directories due to recursive ACL checks. This new parameter is + introduced for backward compatibility. Default value is None. 
+ :paramtype paginated: bool + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_delete_request( + recursive=recursive, + continuation=continuation, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + paginated=paginated, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) + response_headers["x-ms-deletion-id"] = self._deserialize("str", response.headers.get("x-ms-deletion-id")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def set_access_control( + self, + *, + lease_id: Optional[str] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + permissions: Optional[str] = None, + acl: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Set the owner, group, permissions, or access control list for a path. + + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword owner: Optional. The owner of the blob or directory. Default value is None. + :paramtype owner: str + :keyword group: Optional. The owning group of the blob or directory. Default value is None. 
+ :paramtype group: str + :keyword permissions: Optional and only valid if Hierarchical Namespace is enabled for the + account. Sets POSIX access permissions for the file owner, the file owning group, and others. + Each class may be granted read, write, or execute permission. The sticky bit is also supported. + Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. Default value + is None. + :paramtype permissions: str + :keyword acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". Default value is None. + :paramtype acl: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_set_access_control_request( + lease_id=lease_id, + owner=owner, + group=group, + permissions=permissions, + acl=acl, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def set_access_control_recursive( + self, + *, + mode: Union[str, _models.PathSetAccessControlRecursiveMode], + continuation: Optional[str] = None, + force_flag: Optional[bool] = None, + max_records: Optional[int] = None, + acl: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.SetAccessControlRecursiveResponse: + """Set the access control list for a path and sub-paths. + + :keyword mode: Mode "set" sets POSIX access control rights on files and directories, "modify" + modifies one or more POSIX access control rights that pre-exist on files and directories, + "remove" removes one or more POSIX access control rights that were present earlier on files and + directories. Known values are: "set", "modify", and "remove". Required. + :paramtype mode: str or + ~azure.storage.filedatalake._generated.models.PathSetAccessControlRecursiveMode + :keyword continuation: Optional. When deleting a directory, the number of paths that are + deleted with each invocation is limited. If the number of paths to be deleted exceeds this + limit, a continuation token is returned in this response header. When a continuation token is + returned in the response, it must be specified in a subsequent invocation of the delete + operation to continue deleting the directory. Default value is None. + :paramtype continuation: str + :keyword force_flag: Optional. 
Valid for "SetAccessControlRecursive" operation. If set to + false, the operation will terminate quickly on encountering user errors (4XX). If true, the + operation will ignore user errors and proceed with the operation on other sub-entities of the + directory. Continuation token will only be returned when forceFlag is true in case of user + errors. If not set the default value is false for this. Default value is None. + :paramtype force_flag: bool + :keyword max_records: Optional. It specifies the maximum number of files or directories on + which the acl change will be applied. If omitted or greater than 2,000, the request will + process up to 2,000 items. Default value is None. + :paramtype max_records: int + :keyword acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". Default value is None. + :paramtype acl: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations. Default value is None. + :paramtype timeout: int + :return: SetAccessControlRecursiveResponse.
The SetAccessControlRecursiveResponse is compatible + with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.SetAccessControlRecursiveResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None) + + _request = build_path_set_access_control_recursive_request( + mode=mode, + continuation=continuation, + force_flag=force_flag, + max_records=max_records, + acl=acl, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) 
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.SetAccessControlRecursiveResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def flush_data( # pylint: disable=too-many-locals + self, + *, + position: Optional[int] = None, + retain_uncommitted_data: Optional[bool] = None, + close: Optional[bool] = None, + content_length: Optional[int] = None, + content_md5: Optional[bytes] = None, + lease_id: Optional[str] = None, + lease_action: Optional[Union[str, _models.LeaseAction]] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + cache_control: Optional[str] = None, + content_type: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Set the owner, group, permissions, or access control list for a path. 
    @distributed_trace_async
    async def flush_data(  # pylint: disable=too-many-locals
        self,
        *,
        position: Optional[int] = None,
        retain_uncommitted_data: Optional[bool] = None,
        close: Optional[bool] = None,
        content_length: Optional[int] = None,
        content_md5: Optional[bytes] = None,
        lease_id: Optional[str] = None,
        lease_action: Optional[Union[str, _models.LeaseAction]] = None,
        lease_duration: Optional[int] = None,
        proposed_lease_id: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_type: Optional[str] = None,
        content_disposition: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        encryption_key: Optional[str] = None,
        encryption_key_sha256: Optional[str] = None,
        encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Flush previously uploaded data to the file.

        :keyword position: This parameter allows the caller to upload data in parallel and control the
         order in which it is appended to the file. It is required when uploading data to be appended to
         the file and when flushing previously uploaded data to the file. The value must be the position
         where the data is to be appended. Uploaded data is not immediately flushed, or written, to the
         file. To flush, the previously uploaded data must be contiguous, the position parameter must be
         specified and equal to the length of the file after all data has been written, and there must
         not be a request entity body included with the request. Default value is None.
        :paramtype position: int
        :keyword retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data
         is retained after the flush operation completes; otherwise, the uncommitted data is deleted
         after the flush operation. The default is false. Data at offsets less than the specified
         position are written to the file when flush succeeds, but this optional parameter allows data
         after the flush position to be retained for a future flush operation. Default value is None.
        :paramtype retain_uncommitted_data: bool
        :keyword close: Azure Storage Events allow applications to receive notifications when files
         change. When Azure Storage Events are enabled, a file changed event is raised. This event has a
         property indicating whether this is the final change to distinguish the difference between an
         intermediate flush to a file stream and the final close of a file stream. The close query
         parameter is valid only when the action is "flush" and change notifications are enabled. If the
         value of close is "true" and the flush operation completes successfully, the service raises a
         file change notification with a property indicating that this is the final update (the file
         stream has been closed). If "false" a change notification is raised indicating the file has
         changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to
         indicate that the file stream has been closed. Default value is None.
        :paramtype close: bool
        :keyword content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
         Data". Must be the length of the request content in bytes for "Append Data". Default value is
         None.
        :paramtype content_length: int
        :keyword content_md5: Specify the transactional md5 for the body, to be validated by the
         service. Default value is None.
        :paramtype content_md5: bytes
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it
         will renew the lease. If "release" it will release the lease only on flush. If
         "acquire-release" it will acquire & complete the operation & release the lease once operation
         is done. Known values are: "acquire", "auto-renew", "release", and "acquire-release". Default
         value is None.
        :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.LeaseAction
        :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the
         duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1
         for infinite lease. Default value is None.
        :paramtype lease_duration: int
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None.
        :paramtype proposed_lease_id: str
        :keyword cache_control: Optional. Sets the blob's cache control. If specified, this property is
         stored with the blob and returned with a read request. Default value is None.
        :paramtype cache_control: str
        :keyword content_type: Optional. Sets the blob's content type. If specified, this property is
         stored with the blob and returned with a read request. Default value is None.
        :paramtype content_type: str
        :keyword content_disposition: Optional. Sets the blob's Content-Disposition header. Default
         value is None.
        :paramtype content_disposition: str
        :keyword content_encoding: Optional. Sets the blob's content encoding. If specified, this
         property is stored with the blob and returned with a read request. Default value is None.
        :paramtype content_encoding: str
        :keyword content_language: Optional. Set the blob's content language. If specified, this
         property is stored with the blob and returned with a read request. Default value is None.
        :paramtype content_language: str
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not
         been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data
         provided in the request. If not specified, encryption is performed with the root account
         encryption key. Default value is None.
        :paramtype encryption_key: str
        :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be
         provided if the x-ms-encryption-key header is provided. Default value is None.
        :paramtype encryption_key_sha256: str
        :keyword encryption_algorithm: The algorithm used to produce the encryption key hash.
         Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key
         header is provided. "AES256" Default value is None.
        :paramtype encryption_algorithm: str or
         ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # The 412 (precondition failed) mapping depends on which etag
        # precondition the caller requested via ``match_condition``.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_flush_data_request(
            position=position,
            retain_uncommitted_data=retain_uncommitted_data,
            close=close,
            content_length=content_length,
            content_md5=content_md5,
            lease_id=lease_id,
            lease_action=lease_action,
            lease_duration=lease_duration,
            proposed_lease_id=proposed_lease_id,
            cache_control=cache_control,
            content_type=content_type,
            content_disposition=content_disposition,
            content_encoding=content_encoding,
            content_language=content_language,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            encryption_key=encryption_key,
            encryption_key_sha256=encryption_key_sha256,
            encryption_algorithm=encryption_algorithm,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
            "bool", response.headers.get("x-ms-request-server-encrypted")
        )
        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-key-sha256")
        )
        response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def append_data(  # pylint: disable=too-many-locals
        self,
        body: bytes,
        *,
        position: Optional[int] = None,
        content_length: Optional[int] = None,
        transactional_content_hash: Optional[bytes] = None,
        transactional_content_crc64: Optional[bytes] = None,
        lease_id: Optional[str] = None,
        lease_action: Optional[Union[str, _models.LeaseAction]] = None,
        lease_duration: Optional[int] = None,
        proposed_lease_id: Optional[str] = None,
        encryption_key: Optional[str] = None,
        encryption_key_sha256: Optional[str] = None,
        encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
        flush: Optional[bool] = None,
        structured_body_type: Optional[str] = None,
        structured_content_length: Optional[int] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """Append data to the file.

        :param body: Initial data. Required.
        :type body: bytes
        :keyword position: This parameter allows the caller to upload data in parallel and control the
         order in which it is appended to the file. It is required when uploading data to be appended to
         the file and when flushing previously uploaded data to the file. The value must be the position
         where the data is to be appended. Uploaded data is not immediately flushed, or written, to the
         file. To flush, the previously uploaded data must be contiguous, the position parameter must be
         specified and equal to the length of the file after all data has been written, and there must
         not be a request entity body included with the request. Default value is None.
        :paramtype position: int
        :keyword content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
         Data". Must be the length of the request content in bytes for "Append Data". Default value is
         None.
        :paramtype content_length: int
        :keyword transactional_content_hash: Specify the transactional md5 for the body, to be
         validated by the service. Default value is None.
        :paramtype transactional_content_hash: bytes
        :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be
         validated by the service. Default value is None.
        :paramtype transactional_content_crc64: bytes
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it
         will renew the lease. If "release" it will release the lease only on flush. If
         "acquire-release" it will acquire & complete the operation & release the lease once operation
         is done. Known values are: "acquire", "auto-renew", "release", and "acquire-release". Default
         value is None.
        :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.LeaseAction
        :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the
         duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1
         for infinite lease. Default value is None.
        :paramtype lease_duration: int
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None.
        :paramtype proposed_lease_id: str
        :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data
         provided in the request. If not specified, encryption is performed with the root account
         encryption key. Default value is None.
        :paramtype encryption_key: str
        :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be
         provided if the x-ms-encryption-key header is provided. Default value is None.
        :paramtype encryption_key_sha256: str
        :keyword encryption_algorithm: The algorithm used to produce the encryption key hash.
         Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key
         header is provided. "AES256" Default value is None.
        :paramtype encryption_algorithm: str or
         ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType
        :keyword flush: If file should be flushed after the append. Default value is None.
        :paramtype flush: bool
        :keyword structured_body_type: Required if the request body is a structured message. Specifies
         the message schema version and properties. Default value is None.
        :paramtype structured_body_type: str
        :keyword structured_content_length: Required if the request body is a structured message.
         Specifies the length of the blob/file content inside the message body. Will always be smaller
         than Content-Length. Default value is None.
        :paramtype structured_content_length: int
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive so a caller-supplied "content-type" header is found
        # and popped regardless of its casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
        cls: ClsType[None] = kwargs.pop("cls", None)

        _content = body

        _request = build_path_append_data_request(
            position=position,
            content_length=content_length,
            transactional_content_hash=transactional_content_hash,
            transactional_content_crc64=transactional_content_crc64,
            lease_id=lease_id,
            lease_action=lease_action,
            lease_duration=lease_duration,
            proposed_lease_id=proposed_lease_id,
            encryption_key=encryption_key,
            encryption_key_sha256=encryption_key_sha256,
            encryption_algorithm=encryption_algorithm,
            flush=flush,
            structured_body_type=structured_body_type,
            structured_content_length=structured_content_length,
            timeout=timeout,
            content_type=content_type,
            version=self._config.version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Append is an asynchronous service-side commit: success is 202, not 200.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
        response_headers["x-ms-content-crc64"] = self._deserialize(
            "bytearray", response.headers.get("x-ms-content-crc64")
        )
        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
            "bool", response.headers.get("x-ms-request-server-encrypted")
        )
        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-key-sha256")
        )
        response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed"))
        response_headers["x-ms-structured-body"] = self._deserialize(
            "str", response.headers.get("x-ms-structured-body")
        )
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def set_expiry(
        self,
        *,
        expiry_options: Union[str, _models.PathExpiryOptions],
        expires_on: Optional[str] = None,
        timeout: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """Sets the time a blob will expire and be deleted.

        :keyword expiry_options: Required. Indicates mode of the expiry time. Known values are:
         "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required.
        :paramtype expiry_options: str or
         ~azure.storage.filedatalake._generated.models.PathExpiryOptions
        :keyword expires_on: The time to set the blob to expiry. Default value is None.
        :paramtype expires_on: str
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_set_expiry_request(
            expiry_options=expiry_options,
            expires_on=expires_on,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def undelete(
        self, *, undelete_source: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
    ) -> None:
        """Undelete a path that was previously soft deleted.

        :keyword undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path
         of the soft deleted blob to undelete. Default value is None.
        :paramtype undelete_source: str
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_undelete_request(
            undelete_source=undelete_source,
            timeout=timeout,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
class _ParameterGroupExtractionMixin:
    """Mixin that extracts parameter group objects into flat kwargs before calling generated operations.

    Each override runs :func:`extract_parameter_groups` (shared with the sync
    patch) over ``kwargs`` and then delegates to the generated implementation
    via ``super()``.

    NOTE(review): the overrides must forward positional arguments as well.
    The generated ``append_data(self, body, ...)`` takes ``body`` positionally,
    so a ``**kwargs``-only override would raise ``TypeError`` for callers that
    pass it positionally; ``*args`` is forwarded untouched for that reason.
    """

    async def create(self, *args: Any, **kwargs: Any) -> None:
        # Flatten any parameter-group objects, then defer to the generated op.
        extract_parameter_groups(kwargs)
        return await super().create(*args, **kwargs)  # type: ignore[misc]

    async def update(self, *args: Any, **kwargs: Any):
        extract_parameter_groups(kwargs)
        return await super().update(*args, **kwargs)  # type: ignore[misc]

    async def delete(self, *args: Any, **kwargs: Any) -> None:
        extract_parameter_groups(kwargs)
        return await super().delete(*args, **kwargs)  # type: ignore[misc]

    async def set_access_control(self, *args: Any, **kwargs: Any) -> None:
        extract_parameter_groups(kwargs)
        return await super().set_access_control(*args, **kwargs)  # type: ignore[misc]

    async def get_properties(self, *args: Any, **kwargs: Any):
        extract_parameter_groups(kwargs)
        return await super().get_properties(*args, **kwargs)  # type: ignore[misc]

    async def flush_data(self, *args: Any, **kwargs: Any) -> None:
        extract_parameter_groups(kwargs)
        return await super().flush_data(*args, **kwargs)  # type: ignore[misc]

    async def append_data(self, *args: Any, **kwargs: Any) -> None:
        # ``body`` is commonly passed positionally; *args preserves it.
        extract_parameter_groups(kwargs)
        return await super().append_data(*args, **kwargs)  # type: ignore[misc]

    async def set_access_control_recursive(self, *args: Any, **kwargs: Any):
        extract_parameter_groups(kwargs)
        return await super().set_access_control_recursive(*args, **kwargs)  # type: ignore[misc]

    async def undelete(self, *args: Any, **kwargs: Any):
        extract_parameter_groups(kwargs)
        return await super().undelete(*args, **kwargs)  # type: ignore[misc]


class PathOperations(_ParameterGroupExtractionMixin, _PathOperations):
    """PathOperations with parameter group extraction support."""


__all__: list[str] = ["PathOperations"]
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._path_operations import ( - build_append_data_request, - build_create_request, - build_delete_request, - build_flush_data_request, - build_get_properties_request, - build_lease_request, - build_read_request, - build_set_access_control_recursive_request, - build_set_access_control_request, - build_set_expiry_request, - build_undelete_request, - build_update_request, -) -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] - - -class PathOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.filedatalake.aio.AzureDataLakeStorageRESTAPI`'s - :attr:`path` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AzureDataLakeStorageRESTAPIConfiguration = ( - input_args.pop(0) if input_args else kwargs.pop("config") - ) - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def create( # pylint: disable=too-many-locals - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - resource: Optional[Union[str, _models.PathResourceType]] = None, - continuation: Optional[str] = None, - mode: Optional[Union[str, _models.PathRenameMode]] = None, - rename_source: Optional[str] = None, - source_lease_id: Optional[str] = None, - properties: Optional[str] = None, - permissions: Optional[str] = None, - umask: Optional[str] = None, - owner: Optional[str] = None, 
- group: Optional[str] = None, - acl: Optional[str] = None, - proposed_lease_id: Optional[str] = None, - lease_duration: Optional[int] = None, - expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None, - expires_on: Optional[str] = None, - encryption_context: Optional[str] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Create File | Create Directory | Rename File | Rename Directory. - - Create or rename a file or directory. By default, the destination is overwritten and if the - destination already exists and has a lease the lease is broken. This operation supports - conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob - Service Operations - `_. - To fail if the destination already exists, use a conditional request with If-None-Match: "*". - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param resource: Required only for Create File and Create Directory. The value must be "file" - or "directory". Known values are: "directory" and "file". Default value is None. - :type resource: str or ~azure.storage.filedatalake.models.PathResourceType - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. 
If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param mode: Optional. Valid only when namespace is enabled. This parameter determines the - behavior of the rename operation. The value must be "legacy" or "posix", and the default value - will be "posix". Known values are: "legacy" and "posix". Default value is None. - :type mode: str or ~azure.storage.filedatalake.models.PathRenameMode - :param rename_source: An optional file or directory to be renamed. The value must have the - following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties - will overwrite the existing properties; otherwise, the existing properties will be preserved. - This value must be a URL percent-encoded string. Note that the string may only contain ASCII - characters in the ISO-8859-1 character set. Default value is None. - :type rename_source: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. Default value is None. - :type source_lease_id: str - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. 
Default value is - None. - :type properties: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - Default value is None. - :type permissions: str - :param umask: Optional and only valid if Hierarchical Namespace is enabled for the account. - When creating a file or directory and the parent folder does not have a default ACL, the umask - restricts the permissions of the file or directory to be created. The resulting permission is - given by p bitwise and not u, where p is the permission and u is the umask. For example, if p - is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777 - for a directory and 0666 for a file. The default umask is 0027. The umask must be specified - in 4-digit octal notation (e.g. 0766). Default value is None. - :type umask: str - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. 
- :type proposed_lease_id: str - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param expiry_options: Required. Indicates mode of the expiry time. Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Default value is None. - :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions - :param expires_on: The time to set the blob to expiry. Default value is None. - :type expires_on: str - :param encryption_context: Specifies the encryption context to set on the file. Default value - is None. - :type encryption_context: str - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.filedatalake.models.SourceModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _cache_control = None - _content_encoding = None - _content_language = None - _content_disposition = None - _content_type_parameter = None - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = 
source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_create_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - resource=resource, - continuation=continuation, - mode=mode, - cache_control=_cache_control, - content_encoding=_content_encoding, - content_language=_content_language, - content_disposition=_content_disposition, - content_type_parameter=_content_type_parameter, - rename_source=rename_source, - lease_id=_lease_id, - source_lease_id=source_lease_id, - properties=properties, - permissions=permissions, - umask=umask, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - owner=owner, - group=group, - acl=acl, - proposed_lease_id=proposed_lease_id, - lease_duration=lease_duration, - expiry_options=expiry_options, - expires_on=expires_on, - encryption_context=encryption_context, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def update( # pylint: disable=too-many-locals - self, - action: Union[str, _models.PathUpdateAction], - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - body: IO[bytes], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - max_records: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - properties: Optional[str] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: 
Optional[str] = None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> Optional[_models.SetAccessControlRecursiveResponse]: - """Append Data | Flush Data | Set Properties | Set Access Control. - - Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file, - sets properties for a file or directory, or sets access control for a file or directory. Data - can only be appended to a file. Concurrent writes to the same file using multiple clients are - not supported. This operation supports conditional HTTP requests. For more information, see - `Specifying Conditional Headers for Blob Service Operations - `_. - - :param action: The action must be "append" to upload data to be appended to a file, "flush" to - flush previously uploaded data to a file, "setProperties" to set the properties of a file or - directory, "setAccessControl" to set the owner, group, permissions, or access control list for - a file or directory, or "setAccessControlRecursive" to set the access control list for a - directory recursively. Note that Hierarchical Namespace must be enabled for the account in - order to use access control. Also note that the Access Control List (ACL) includes permissions - for the owner, owning group, and others, so the x-ms-permissions and x-ms-acl request headers - are mutually exclusive. Known values are: "append", "flush", "setProperties", - "setAccessControl", and "setAccessControlRecursive". Required. 
- :type action: str or ~azure.storage.filedatalake.models.PathUpdateAction - :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" - modifies one or more POSIX access control rights that pre-exist on files and directories, - "remove" removes one or more POSIX access control rights that were present earlier on files - and directories. Known values are: "set", "modify", and "remove". Required. - :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode - :param body: Initial data. Required. - :type body: IO[bytes] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the - maximum number of files or directories on which the acl change will be applied. If omitted or - greater than 2,000, the request will process up to 2,000 items. Default value is None. - :type max_records: int - :param continuation: Optional. The number of paths processed with each invocation is limited. - If the number of paths to be processed exceeds this limit, a continuation token is returned in - the response header x-ms-continuation. When a continuation token is returned in the response, - it must be percent-encoded and specified in a subsequent invocation of - setAccessControlRecursive operation. Default value is None. - :type continuation: str - :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false, - the operation will terminate quickly on encountering user errors (4XX). 
If true, the operation - will ignore user errors and proceed with the operation on other sub-entities of the directory. - Continuation token will only be returned when forceFlag is true in case of user errors. If not - set the default value is false for this. Default value is None. - :type force_flag: bool - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. Uploaded data is not immediately flushed, or - written, to the file. To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data - is retained after the flush operation completes; otherwise, the uncommitted data is deleted - after the flush operation. The default is false. Data at offsets less than the specified - position are written to the file when flush succeeds, but this optional parameter allows data - after the flush position to be retained for a future flush operation. Default value is None. - :type retain_uncommitted_data: bool - :param close: Azure Storage Events allow applications to receive notifications when files - change. When Azure Storage Events are enabled, a file changed event is raised. This event has a - property indicating whether this is the final change to distinguish the difference between an - intermediate flush to a file stream and the final close of a file stream. The close query - parameter is valid only when the action is "flush" and change notifications are enabled. 
If the - value of close is "true" and the flush operation completes successfully, the service raises a - file change notification with a property indicating that this is the final update (the file - stream has been closed). If "false" a change notification is raised indicating the file has - changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to - indicate that the file stream has been closed.". Default value is None. - :type close: bool - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. - :type content_length: int - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. Default value is - None. - :type properties: str - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - Default value is None. 
- :type permissions: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param structured_body_type: Required if the request body is a structured message. Specifies - the message schema version and properties. Default value is None. - :type structured_body_type: str - :param structured_content_length: Required if the request body is a structured message. - Specifies the length of the blob/file content inside the message body. Will always be smaller - than Content-Length. Default value is None. - :type structured_content_length: int - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: SetAccessControlRecursiveResponse or None or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[Optional[_models.SetAccessControlRecursiveResponse]] = kwargs.pop("cls", None) - - _content_md5 = None - _lease_id = None - _cache_control = None - _content_type_parameter = None - _content_disposition = None - _content_encoding = None - _content_language = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_md5 = path_http_headers.content_md5 - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_update_request( - url=self._config.url, - action=action, - 
mode=mode, - request_id_parameter=request_id_parameter, - timeout=timeout, - max_records=max_records, - continuation=continuation, - force_flag=force_flag, - position=position, - retain_uncommitted_data=retain_uncommitted_data, - close=close, - content_length=content_length, - content_md5=_content_md5, - lease_id=_lease_id, - cache_control=_cache_control, - content_type_parameter=_content_type_parameter, - content_disposition=_content_disposition, - content_encoding=_content_encoding, - content_language=_content_language, - properties=properties, - owner=owner, - group=group, - permissions=permissions, - acl=acl, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - structured_body_type=structured_body_type, - structured_content_length=structured_content_length, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - deserialized = None - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Accept-Ranges"] = self._deserialize("str", 
response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) - - if response.status_code == 202: - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-structured-body"] = self._deserialize( - "str", response.headers.get("x-ms-structured-body") - ) - - if cls: - return 
cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def lease( - self, - x_ms_lease_action: Union[str, _models.PathLeaseAction], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - x_ms_lease_break_period: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Lease Path. - - Create and manage a lease to restrict write and delete access to the path. This operation - supports conditional HTTP requests. For more information, see `Specifying Conditional Headers - for Blob Service Operations - `_. - - :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew", - and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration" - to acquire a new lease. Use "break" to break an existing lease. When a lease is broken, the - lease break period is allowed to elapse, during which time no lease operation except break and - release can be performed on the file. When a lease is successfully broken, the response - indicates the interval in seconds until a new lease can be acquired. Use "change" and specify - the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to - change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an - existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", "release", and "break". Required. 
- :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param x_ms_lease_break_period: The lease break period duration is optional to break a lease, - and specifies the break period of the lease in seconds. The lease break duration must be - between 0 and 60 seconds. Default value is None. - :type x_ms_lease_break_period: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_lease_request( - url=self._config.url, - x_ms_lease_action=x_ms_lease_action, - request_id_parameter=request_id_parameter, - timeout=timeout, - x_ms_lease_break_period=x_ms_lease_break_period, - lease_id=_lease_id, - proposed_lease_id=proposed_lease_id, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - x_ms_lease_duration=self._config.x_ms_lease_duration, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201, 202]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - - if response.status_code == 201: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - - if response.status_code == 202: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-time"] = self._deserialize("str", 
response.headers.get("x-ms-lease-time")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def read( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - x_ms_range_get_content_md5: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> AsyncIterator[bytes]: - """Read File. - - Read the contents of a file. For read operations, range requests are supported. This operation - supports conditional HTTP requests. For more information, see `Specifying Conditional Headers - for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: The HTTP Range request header specifies one or more byte ranges of the resource - to be retrieved. Default value is None. - :type range: str - :param x_ms_range_get_content_md5: Optional. When this header is set to "true" and specified - together with the Range header, the service returns the MD5 hash for the range, as long as the - range is less than or equal to 4MB in size. If this header is specified without the Range - header, the service returns status code 400 (Bad Request). If this header is set to true when - the range exceeds 4 MB in size, the service returns status code 400 (Bad Request). Default - value is None. 
- :type x_ms_range_get_content_md5: bool - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: AsyncIterator[bytes] or the result of cls(response) - :rtype: AsyncIterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_read_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - range=range, - lease_id=_lease_id, - 
x_ms_range_get_content_md5=x_ms_range_get_content_md5, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - await response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = 
self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize( - "str", response.headers.get("x-ms-resource-type") - ) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - if response.status_code == 206: - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = 
self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["x-ms-content-md5"] = self._deserialize("str", response.headers.get("x-ms-content-md5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize( - "str", response.headers.get("x-ms-resource-type") - ) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - 
if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def get_properties( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, - upn: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Get Properties | Get Status | Get Access Control List. - - Get Properties returns all system and user defined properties for a path. Get Status returns - all system defined properties for a path. Get Access Control List returns the access control - list for a path. This operation supports conditional HTTP requests. For more information, see - `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param action: Optional. If the value is "getStatus" only the system defined properties for the - path are returned. If the value is "getAccessControl" the access control list is returned in - the response headers (Hierarchical Namespace must be enabled for the account), otherwise the - properties are returned. Known values are: "getAccessControl" and "getStatus". Default value is - None. - :type action: str or ~azure.storage.filedatalake.models.PathGetPropertiesAction - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. 
If - "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. Note that group and application Object IDs are not translated because they do not have - unique friendly names. Default value is None. - :type upn: bool - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_properties_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - action=action, - upn=upn, - lease_id=_lease_id, - if_match=_if_match, - 
if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - 
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner")) - response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group")) - response_headers["x-ms-permissions"] = self._deserialize("str", response.headers.get("x-ms-permissions")) - response_headers["x-ms-acl"] = self._deserialize("str", response.headers.get("x-ms-acl")) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-context"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-context") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - 
@distributed_trace_async - async def delete( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - recursive: Optional[bool] = None, - continuation: Optional[str] = None, - paginated: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Delete File | Delete Directory. - - Delete the file or directory. This operation supports conditional HTTP requests. For more - information, see `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param recursive: Required. Default value is None. - :type recursive: bool - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param paginated: If true, paginated behavior will be seen. Pagination is for the recursive ACL - checks as a POSIX requirement in the server and Delete in an atomic operation once the ACL - checks are completed. If false or missing, normal default behavior will kick in, which may - timeout in case of very large directories due to recursive ACL checks. 
This new parameter is - introduced for backward compatibility. Default value is None. - :type paginated: bool - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - recursive=recursive, - continuation=continuation, - lease_id=_lease_id, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - paginated=paginated, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["x-ms-deletion-id"] = self._deserialize("str", response.headers.get("x-ms-deletion-id")) - - if response.status_code == 202: - response_headers["Date"] = self._deserialize("str", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_access_control( - self, - timeout: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - 
"""Set the owner, group, permissions, or access control list for a path. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - Default value is None. - :type permissions: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_access_control_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - owner=owner, - group=group, - permissions=permissions, - acl=acl, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_access_control_recursive( - self, - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - timeout: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - max_records: Optional[int] = None, - acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.SetAccessControlRecursiveResponse: - """Set the access control list for a path and sub-paths. - - :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" - modifies one or more POSIX access control rights that pre-exist on files and directories, - "remove" removes one or more POSIX access control rights that were present earlier on files - and directories. Known values are: "set", "modify", and "remove". Required. - :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode - :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false, - the operation will terminate quickly on encountering user errors (4XX). If true, the operation - will ignore user errors and proceed with the operation on other sub-entities of the directory. - Continuation token will only be returned when forceFlag is true in case of user errors. If not - set the default value is false for this. Default value is None. - :type force_flag: bool - :param max_records: Optional. It specifies the maximum number of files or directories on which - the acl change will be applied. If omitted or greater than 2,000, the request will process up - to 2,000 items. Default value is None. - :type max_records: int - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: SetAccessControlRecursiveResponse or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControlRecursive"] = kwargs.pop( - "action", _params.pop("action", "setAccessControlRecursive") - ) - cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None) - - _request = build_set_access_control_recursive_request( - url=self._config.url, - mode=mode, - timeout=timeout, - continuation=continuation, - force_flag=force_flag, - max_records=max_records, - acl=acl, - request_id_parameter=request_id_parameter, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-continuation"] = self._deserialize("str", 
response.headers.get("x-ms-continuation")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace_async - async def flush_data( # pylint: disable=too-many-locals - self, - timeout: Optional[int] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Set the owner, group, permissions, or access control list for a path. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. Uploaded data is not immediately flushed, or - written, to the file. 
To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data - is retained after the flush operation completes; otherwise, the uncommitted data is deleted - after the flush operation. The default is false. Data at offsets less than the specified - position are written to the file when flush succeeds, but this optional parameter allows data - after the flush position to be retained for a future flush operation. Default value is None. - :type retain_uncommitted_data: bool - :param close: Azure Storage Events allow applications to receive notifications when files - change. When Azure Storage Events are enabled, a file changed event is raised. This event has a - property indicating whether this is the final change to distinguish the difference between an - intermediate flush to a file stream and the final close of a file stream. The close query - parameter is valid only when the action is "flush" and change notifications are enabled. If the - value of close is "true" and the flush operation completes successfully, the service raises a - file change notification with a property indicating that this is the final update (the file - stream has been closed). If "false" a change notification is raised indicating the file has - changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to - indicate that the file stream has been closed.". Default value is None. - :type close: bool - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. 
- :type content_length: int - :param lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it will - renew the lease. If "release" it will release the lease only on flush. If "acquire-release" it - will acquire & complete the operation & release the lease once operation is done. Known values - are: "acquire", "auto-renew", "release", and "acquire-release". Default value is None. - :type lease_action: str or ~azure.storage.filedatalake.models.LeaseAction - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _content_md5 = None - _lease_id = None - _cache_control = None - _content_type_parameter = None - _content_disposition = None - _content_encoding = None - _content_language = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_md5 = path_http_headers.content_md5 - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_flush_data_request( - 
url=self._config.url, - timeout=timeout, - position=position, - retain_uncommitted_data=retain_uncommitted_data, - close=close, - content_length=content_length, - content_md5=_content_md5, - lease_id=_lease_id, - lease_action=lease_action, - lease_duration=lease_duration, - proposed_lease_id=proposed_lease_id, - cache_control=_cache_control, - content_type_parameter=_content_type_parameter, - content_disposition=_content_disposition, - content_encoding=_content_encoding, - content_language=_content_language, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def append_data( # pylint: disable=too-many-locals - self, - body: IO[bytes], - position: Optional[int] = None, - timeout: Optional[int] = None, - content_length: Optional[int] = None, - transactional_content_crc64: Optional[bytes] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - flush: Optional[bool] = None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Append data to the file. - - :param body: Initial data. Required. - :type body: IO[bytes] - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. 
Uploaded data is not immediately flushed, or - written, to the file. To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. - :type content_length: int - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it will - renew the lease. If "release" it will release the lease only on flush. If "acquire-release" it - will acquire & complete the operation & release the lease once operation is done. Known values - are: "acquire", "auto-renew", "release", and "acquire-release". Default value is None. - :type lease_action: str or ~azure.storage.filedatalake.models.LeaseAction - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. 
- :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param flush: If file should be flushed after the append. Default value is None. - :type flush: bool - :param structured_body_type: Required if the request body is a structured message. Specifies - the message schema version and properties. Default value is None. - :type structured_body_type: str - :param structured_content_length: Required if the request body is a structured message. - Specifies the length of the blob/file content inside the message body. Will always be smaller - than Content-Length. Default value is None. - :type structured_content_length: int - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _transactional_content_hash = None - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _transactional_content_hash = path_http_headers.transactional_content_hash - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - _content = body - - _request = build_append_data_request( - url=self._config.url, - position=position, - timeout=timeout, - content_length=content_length, - transactional_content_hash=_transactional_content_hash, - transactional_content_crc64=transactional_content_crc64, - lease_id=_lease_id, - lease_action=lease_action, - lease_duration=lease_duration, - proposed_lease_id=proposed_lease_id, - request_id_parameter=request_id_parameter, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - flush=flush, - structured_body_type=structured_body_type, - structured_content_length=structured_content_length, - 
action=action, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) - response_headers["x-ms-structured-body"] = self._deserialize( - "str", response.headers.get("x-ms-structured-body") - ) - - if cls: - return 
cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def set_expiry( - self, - expiry_options: Union[str, _models.PathExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets the time a blob will expire and be deleted. - - :param expiry_options: Required. Indicates mode of the expiry time. Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. - :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. Default value is None. 
- :type expires_on: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_expiry_request( - url=self._config.url, - expiry_options=expiry_options, - timeout=timeout, - request_id_parameter=request_id_parameter, - expires_on=expires_on, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - 
response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace_async - async def undelete( - self, - timeout: Optional[int] = None, - undelete_source: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> None: - """Undelete a path that was previously soft deleted. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path of - the soft deleted blob to undelete. Default value is None. - :type undelete_source: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_undelete_request( - url=self._config.url, - timeout=timeout, - undelete_source=undelete_source, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return 
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py deleted file mode 100644 index 83b9459d6ede..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py +++ /dev/null @@ -1,160 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Literal, Optional, TypeVar - -from azure.core import AsyncPipelineClient -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from ... 
import models as _models -from ..._utils.serialization import Deserializer, Serializer -from ...operations._service_operations import build_list_file_systems_request -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] - - -class ServiceOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.filedatalake.aio.AzureDataLakeStorageRESTAPI`'s - :attr:`service` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AzureDataLakeStorageRESTAPIConfiguration = ( - input_args.pop(0) if input_args else kwargs.pop("config") - ) - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_file_systems( - self, - prefix: Optional[str] = None, - continuation: Optional[str] = None, - max_results: Optional[int] = None, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - **kwargs: Any - ) -> AsyncItemPaged["_models.FileSystem"]: - """List FileSystems. - - List filesystems and their properties in given account. - - :param prefix: Filters results to filesystems within the specified prefix. Default value is - None. - :type prefix: str - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. 
When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param max_results: An optional value that specifies the maximum number of items to return. If - omitted or greater than 5,000, the response will include up to 5,000 items. Default value is - None. - :type max_results: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :return: An iterator like instance of either FileSystem or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.filedatalake.models.FileSystem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account")) - cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_file_systems_request( - url=self._config.url, - prefix=prefix, - continuation=continuation, - max_results=max_results, - request_id_parameter=request_id_parameter, - timeout=timeout, - resource=resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = 
self._client.format_url(_request.url) - - else: - _request = HttpRequest("GET", next_link) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("FileSystemList", pipeline_response) - list_of_elem = deserialized.filesystems - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/__init__.py index ca1ce1ca0e03..6f17dd1b1b8d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -13,30 +13,29 @@ from ._patch import * # pylint: disable=unused-wildcard-import -from ._models_py3 import ( # type: ignore +from ._models import ( # type: ignore AclFailedEntry, BlobHierarchyListSegment, BlobItemInternal, BlobPrefix, BlobPropertiesInternal, - CpkInfo, - FileSystem, + FileSystemItem, FileSystemList, - LeaseAccessConditions, ListBlobsHierarchySegmentResponse, - ModifiedAccessConditions, - Path, - PathHTTPHeaders, + PathItem, PathList, SetAccessControlRecursiveResponse, - SourceModifiedAccessConditions, StorageError, - StorageErrorError, + StorageErrorBody, ) -from ._azure_data_lake_storage_restapi_enums import ( # type: ignore +from ._enums import ( # type: ignore + AccountResourceType, + EncryptionAlgorithmType, + FileSystemResourceType, LeaseAction, ListBlobsIncludeItem, + ListBlobsShowOnly, PathExpiryOptions, PathGetPropertiesAction, PathLeaseAction, @@ -55,21 +54,20 @@ "BlobItemInternal", "BlobPrefix", "BlobPropertiesInternal", - "CpkInfo", - "FileSystem", + "FileSystemItem", "FileSystemList", - "LeaseAccessConditions", "ListBlobsHierarchySegmentResponse", - "ModifiedAccessConditions", - "Path", - "PathHTTPHeaders", + "PathItem", "PathList", "SetAccessControlRecursiveResponse", - "SourceModifiedAccessConditions", "StorageError", - "StorageErrorError", + "StorageErrorBody", + "AccountResourceType", + "EncryptionAlgorithmType", + "FileSystemResourceType", "LeaseAction", "ListBlobsIncludeItem", + "ListBlobsShowOnly", "PathExpiryOptions", "PathGetPropertiesAction", "PathLeaseAction", diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py 
deleted file mode 100644 index c9bb43b5e4a0..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from enum import Enum -from azure.core import CaseInsensitiveEnumMeta - - -class LeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """LeaseAction.""" - - ACQUIRE = "acquire" - AUTO_RENEW = "auto-renew" - RELEASE = "release" - ACQUIRE_RELEASE = "acquire-release" - - -class ListBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """ListBlobsIncludeItem.""" - - COPY = "copy" - DELETED = "deleted" - METADATA = "metadata" - SNAPSHOTS = "snapshots" - UNCOMMITTEDBLOBS = "uncommittedblobs" - VERSIONS = "versions" - TAGS = "tags" - - -class PathExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathExpiryOptions.""" - - NEVER_EXPIRE = "NeverExpire" - RELATIVE_TO_CREATION = "RelativeToCreation" - RELATIVE_TO_NOW = "RelativeToNow" - ABSOLUTE = "Absolute" - - -class PathGetPropertiesAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathGetPropertiesAction.""" - - GET_ACCESS_CONTROL = "getAccessControl" - GET_STATUS = "getStatus" - - -class PathLeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathLeaseAction.""" - - ACQUIRE = "acquire" - BREAK = "break" - CHANGE = "change" - RENEW = "renew" - RELEASE = "release" - BREAK_ENUM = "break" - - -class PathRenameMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathRenameMode.""" 
- - LEGACY = "legacy" - POSIX = "posix" - - -class PathResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathResourceType.""" - - DIRECTORY = "directory" - FILE = "file" - - -class PathSetAccessControlRecursiveMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathSetAccessControlRecursiveMode.""" - - SET = "set" - MODIFY = "modify" - REMOVE = "remove" - - -class PathUpdateAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """PathUpdateAction.""" - - APPEND = "append" - FLUSH = "flush" - SET_PROPERTIES = "setProperties" - SET_ACCESS_CONTROL = "setAccessControl" - SET_ACCESS_CONTROL_RECURSIVE = "setAccessControlRecursive" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_enums.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_enums.py new file mode 100644 index 000000000000..2f75d59f3524 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_enums.py @@ -0,0 +1,151 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AccountResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The value must be "account" for all account operations.""" + + ACCOUNT = "account" + """The account resource type.""" + + +class EncryptionAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The encryption algorithm type.""" + + AES256 = "AES256" + """The AES256 encryption algorithm.""" + + +class FileSystemResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The value must be "filesystem" for all filesystem operations.""" + + FILESYSTEM = "filesystem" + """The filesystem resource type.""" + + +class LeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The lease action for append and flush operations.""" + + ACQUIRE = "acquire" + """Acquire a lease.""" + AUTO_RENEW = "auto-renew" + """Auto-renew the lease.""" + RELEASE = "release" + """Release the lease only on flush.""" + ACQUIRE_RELEASE = "acquire-release" + """Acquire, complete the operation, and release the lease.""" + + +class ListBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Include this parameter to specify one or more datasets to include in the response.""" + + COPY = "copy" + """Include copy information.""" + DELETED = "deleted" + """Include deleted blobs.""" + METADATA = "metadata" + """Include metadata.""" + SNAPSHOTS = "snapshots" + """Include snapshots.""" + UNCOMMITTEDBLOBS = "uncommittedblobs" + """Include uncommitted blobs.""" + VERSIONS = "versions" + """Include versions.""" + TAGS = "tags" + """Include tags.""" + + +class ListBlobsShowOnly(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The show only filter for list blobs.""" + + DELETED = "deleted" + """Show only deleted blobs.""" + + +class PathExpiryOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The path expiry options.""" + + NEVER_EXPIRE 
= "NeverExpire" + """Never expire.""" + RELATIVE_TO_CREATION = "RelativeToCreation" + """Relative to creation time.""" + RELATIVE_TO_NOW = "RelativeToNow" + """Relative to now.""" + ABSOLUTE = "Absolute" + """Absolute time.""" + + +class PathGetPropertiesAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The action for Path_GetProperties.""" + + GET_ACCESS_CONTROL = "getAccessControl" + """Get access control list.""" + GET_STATUS = "getStatus" + """Get status.""" + + +class PathLeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The lease action for Path_Lease.""" + + ACQUIRE = "acquire" + """Acquire a new lease.""" + BREAK = "break" + """Break an existing lease.""" + CHANGE = "change" + """Change the lease ID of an active lease.""" + RENEW = "renew" + """Renew an existing lease.""" + RELEASE = "release" + """Release a lease.""" + + +class PathRenameMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The rename mode. The value must be "legacy" or "posix".""" + + LEGACY = "legacy" + """Legacy rename mode.""" + POSIX = "posix" + """POSIX rename mode.""" + + +class PathResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Required only for Create File and Create Directory. 
The value must be "file" or "directory".""" + + DIRECTORY = "directory" + """A directory resource type.""" + FILE = "file" + """A file resource type.""" + + +class PathSetAccessControlRecursiveMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The mode for recursive access control operations.""" + + SET = "set" + """Set POSIX access control rights.""" + MODIFY = "modify" + """Modify POSIX access control rights.""" + REMOVE = "remove" + """Remove POSIX access control rights.""" + + +class PathUpdateAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The action to perform on the path during update.""" + + APPEND = "append" + """Append data to the file.""" + FLUSH = "flush" + """Flush previously uploaded data to a file.""" + SET_PROPERTIES = "setProperties" + """Set the properties of a file or directory.""" + SET_ACCESS_CONTROL = "setAccessControl" + """Set the access control for a file or directory.""" + SET_ACCESS_CONTROL_RECURSIVE = "setAccessControlRecursive" + """Set the access control list for a directory recursively.""" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models.py new file mode 100644 index 000000000000..5478e14a1616 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models.py @@ -0,0 +1,925 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Mapping, Optional, TYPE_CHECKING, overload + +from .._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. import models as _models + + +class AclFailedEntry(_Model): + """An ACL failed entry. + + :ivar name: The name of the entry. + :vartype name: str + :ivar type: The type of the entry. + :vartype type: str + :ivar error_message: The error message. + :vartype error_message: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the entry.""" + type: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the entry.""" + error_message: Optional[str] = rest_field( + name="errorMessage", visibility=["read", "create", "update", "delete", "query"] + ) + """The error message.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[str] = None, + error_message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobHierarchyListSegment(_Model): + """A segment of blob hierarchy items. + + :ivar blob_prefixes: The blob prefixes. + :vartype blob_prefixes: ~azure.storage.filedatalake._generated.models.BlobPrefix + :ivar blob_items: The blob items. Required. 
+ :vartype blob_items: ~azure.storage.filedatalake._generated.models.BlobItemInternal + """ + + blob_prefixes: Optional[list["_models.BlobPrefix"]] = rest_field( + name="blobPrefixes", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "itemsName": "BlobPrefix", "name": "BlobPrefix", "text": False, "unwrapped": True}, + ) + """The blob prefixes.""" + blob_items: list["_models.BlobItemInternal"] = rest_field( + name="blobItems", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "itemsName": "Blob", "name": "Blob", "text": False, "unwrapped": True}, + ) + """The blob items. Required.""" + + _xml = {"attribute": False, "name": "Blobs", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + blob_items: list["_models.BlobItemInternal"], + blob_prefixes: Optional[list["_models.BlobPrefix"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobItemInternal(_Model): + """An Azure Storage blob. + + :ivar name: The blob name. Required. + :vartype name: str + :ivar deleted: Whether the blob is deleted. Required. + :vartype deleted: bool + :ivar snapshot: The snapshot. Required. + :vartype snapshot: str + :ivar version_id: The version ID. + :vartype version_id: str + :ivar is_current_version: Whether this is the current version. + :vartype is_current_version: bool + :ivar properties: The blob properties. Required. + :vartype properties: ~azure.storage.filedatalake._generated.models.BlobPropertiesInternal + :ivar deletion_id: The deletion ID. 
+ :vartype deletion_id: str + """ + + name: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Name", "text": False, "unwrapped": False}, + ) + """The blob name. Required.""" + deleted: bool = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Deleted", "text": False, "unwrapped": False}, + ) + """Whether the blob is deleted. Required.""" + snapshot: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Snapshot", "text": False, "unwrapped": False}, + ) + """The snapshot. Required.""" + version_id: Optional[str] = rest_field( + name="versionId", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "VersionId", "text": False, "unwrapped": False}, + ) + """The version ID.""" + is_current_version: Optional[bool] = rest_field( + name="isCurrentVersion", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "IsCurrentVersion", "text": False, "unwrapped": False}, + ) + """Whether this is the current version.""" + properties: "_models.BlobPropertiesInternal" = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Properties", "text": False, "unwrapped": False}, + ) + """The blob properties. 
Required.""" + deletion_id: Optional[str] = rest_field( + name="deletionId", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "DeletionId", "text": False, "unwrapped": False}, + ) + """The deletion ID.""" + + _xml = {"attribute": False, "name": "Blob", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + name: str, + deleted: bool, + snapshot: str, + properties: "_models.BlobPropertiesInternal", + version_id: Optional[str] = None, + is_current_version: Optional[bool] = None, + deletion_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobPrefix(_Model): + """A blob prefix. + + :ivar name: The prefix name. Required. + :vartype name: str + """ + + name: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Name", "text": False, "unwrapped": False}, + ) + """The prefix name. Required.""" + + _xml = {"attribute": False, "name": "BlobPrefix", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobPropertiesInternal(_Model): + """Properties of a blob. + + :ivar creation_time: The creation time. + :vartype creation_time: ~datetime.datetime + :ivar last_modified: The last modified time. Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: The entity tag. Required. + :vartype etag: str + :ivar content_length: Size in bytes. 
+ :vartype content_length: int + :ivar content_type: The content type. + :vartype content_type: str + :ivar content_encoding: The content encoding. + :vartype content_encoding: str + :ivar content_language: The content language. + :vartype content_language: str + :ivar content_md5: The content MD5 hash. + :vartype content_md5: bytes + :ivar content_disposition: The content disposition. + :vartype content_disposition: str + :ivar cache_control: The cache control. + :vartype cache_control: str + :ivar blob_sequence_number: The blob sequence number. + :vartype blob_sequence_number: int + :ivar copy_id: The copy ID. + :vartype copy_id: str + :ivar copy_source: The copy source. + :vartype copy_source: str + :ivar copy_progress: The copy progress. + :vartype copy_progress: str + :ivar copy_completion_time: The copy completion time. + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: The copy status description. + :vartype copy_status_description: str + :ivar server_encrypted: Whether the server is encrypted. + :vartype server_encrypted: bool + :ivar incremental_copy: Whether it is an incremental copy. + :vartype incremental_copy: bool + :ivar destination_snapshot: The destination snapshot. + :vartype destination_snapshot: str + :ivar deleted_time: The deleted time. + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: The remaining retention days. + :vartype remaining_retention_days: int + :ivar access_tier_inferred: Whether the access tier is inferred. + :vartype access_tier_inferred: bool + :ivar customer_provided_key_sha256: The customer-provided key SHA256 hash. + :vartype customer_provided_key_sha256: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar access_tier_change_time: The access tier change time. + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: The tag count. 
+ :vartype tag_count: int + :ivar expires_on: The expiry time. + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: Whether the blob is sealed. + :vartype is_sealed: bool + :ivar last_accessed_on: The last accessed time. + :vartype last_accessed_on: ~datetime.datetime + :ivar delete_time: The delete time. + :vartype delete_time: ~datetime.datetime + """ + + creation_time: Optional[datetime.datetime] = rest_field( + name="creationTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "Creation-Time", "text": False, "unwrapped": False}, + ) + """The creation time.""" + last_modified: datetime.datetime = rest_field( + name="lastModified", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "Last-Modified", "text": False, "unwrapped": False}, + ) + """The last modified time. Required.""" + etag: str = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Etag", "text": False, "unwrapped": False}, + ) + """The entity tag. 
Required.""" + content_length: Optional[int] = rest_field( + name="contentLength", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Content-Length", "text": False, "unwrapped": False}, + ) + """Size in bytes.""" + content_type: Optional[str] = rest_field( + name="contentType", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Content-Type", "text": False, "unwrapped": False}, + ) + """The content type.""" + content_encoding: Optional[str] = rest_field( + name="contentEncoding", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Content-Encoding", "text": False, "unwrapped": False}, + ) + """The content encoding.""" + content_language: Optional[str] = rest_field( + name="contentLanguage", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Content-Language", "text": False, "unwrapped": False}, + ) + """The content language.""" + content_md5: Optional[bytes] = rest_field( + name="contentMd5", + visibility=["read", "create", "update", "delete", "query"], + format="base64", + xml={"attribute": False, "name": "Content-MD5", "text": False, "unwrapped": False}, + ) + """The content MD5 hash.""" + content_disposition: Optional[str] = rest_field( + name="contentDisposition", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Content-Disposition", "text": False, "unwrapped": False}, + ) + """The content disposition.""" + cache_control: Optional[str] = rest_field( + name="cacheControl", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Cache-Control", "text": False, "unwrapped": False}, + ) + """The cache control.""" + blob_sequence_number: Optional[int] = rest_field( + name="blobSequenceNumber", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": 
"x-ms-blob-sequence-number", "text": False, "unwrapped": False}, + ) + """The blob sequence number.""" + copy_id: Optional[str] = rest_field( + name="copyId", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "CopyId", "text": False, "unwrapped": False}, + ) + """The copy ID.""" + copy_source: Optional[str] = rest_field( + name="copySource", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "CopySource", "text": False, "unwrapped": False}, + ) + """The copy source.""" + copy_progress: Optional[str] = rest_field( + name="copyProgress", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "CopyProgress", "text": False, "unwrapped": False}, + ) + """The copy progress.""" + copy_completion_time: Optional[datetime.datetime] = rest_field( + name="copyCompletionTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "CopyCompletionTime", "text": False, "unwrapped": False}, + ) + """The copy completion time.""" + copy_status_description: Optional[str] = rest_field( + name="copyStatusDescription", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "CopyStatusDescription", "text": False, "unwrapped": False}, + ) + """The copy status description.""" + server_encrypted: Optional[bool] = rest_field( + name="serverEncrypted", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "ServerEncrypted", "text": False, "unwrapped": False}, + ) + """Whether the server is encrypted.""" + incremental_copy: Optional[bool] = rest_field( + name="incrementalCopy", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "IncrementalCopy", "text": False, "unwrapped": False}, + ) + """Whether it is an incremental copy.""" + destination_snapshot: Optional[str] = 
rest_field( + name="destinationSnapshot", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "DestinationSnapshot", "text": False, "unwrapped": False}, + ) + """The destination snapshot.""" + deleted_time: Optional[datetime.datetime] = rest_field( + name="deletedTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "DeletedTime", "text": False, "unwrapped": False}, + ) + """The deleted time.""" + remaining_retention_days: Optional[int] = rest_field( + name="remainingRetentionDays", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "RemainingRetentionDays", "text": False, "unwrapped": False}, + ) + """The remaining retention days.""" + access_tier_inferred: Optional[bool] = rest_field( + name="accessTierInferred", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "AccessTierInferred", "text": False, "unwrapped": False}, + ) + """Whether the access tier is inferred.""" + customer_provided_key_sha256: Optional[str] = rest_field( + name="customerProvidedKeySha256", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "CustomerProvidedKeySha256", "text": False, "unwrapped": False}, + ) + """The customer-provided key SHA256 hash.""" + encryption_scope: Optional[str] = rest_field( + name="encryptionScope", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "EncryptionScope", "text": False, "unwrapped": False}, + ) + """The name of the encryption scope under which the blob is encrypted.""" + access_tier_change_time: Optional[datetime.datetime] = rest_field( + name="accessTierChangeTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "AccessTierChangeTime", "text": False, "unwrapped": False}, + ) + """The 
access tier change time.""" + tag_count: Optional[int] = rest_field( + name="tagCount", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "TagCount", "text": False, "unwrapped": False}, + ) + """The tag count.""" + expires_on: Optional[datetime.datetime] = rest_field( + name="expiresOn", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "Expiry-Time", "text": False, "unwrapped": False}, + ) + """The expiry time.""" + is_sealed: Optional[bool] = rest_field( + name="isSealed", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Sealed", "text": False, "unwrapped": False}, + ) + """Whether the blob is sealed.""" + last_accessed_on: Optional[datetime.datetime] = rest_field( + name="lastAccessedOn", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "LastAccessTime", "text": False, "unwrapped": False}, + ) + """The last accessed time.""" + delete_time: Optional[datetime.datetime] = rest_field( + name="deleteTime", + visibility=["read", "create", "update", "delete", "query"], + format="rfc7231", + xml={"attribute": False, "name": "DeleteTime", "text": False, "unwrapped": False}, + ) + """The delete time.""" + + _xml = {"attribute": False, "name": "Properties", "text": False, "unwrapped": False} + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + last_modified: datetime.datetime, + etag: str, + creation_time: Optional[datetime.datetime] = None, + content_length: Optional[int] = None, + content_type: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_md5: Optional[bytes] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + copy_id: Optional[str] = None, + copy_source: 
Optional[str] = None, + copy_progress: Optional[str] = None, + copy_completion_time: Optional[datetime.datetime] = None, + copy_status_description: Optional[str] = None, + server_encrypted: Optional[bool] = None, + incremental_copy: Optional[bool] = None, + destination_snapshot: Optional[str] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + access_tier_inferred: Optional[bool] = None, + customer_provided_key_sha256: Optional[str] = None, + encryption_scope: Optional[str] = None, + access_tier_change_time: Optional[datetime.datetime] = None, + tag_count: Optional[int] = None, + expires_on: Optional[datetime.datetime] = None, + is_sealed: Optional[bool] = None, + last_accessed_on: Optional[datetime.datetime] = None, + delete_time: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileSystemItem(_Model): + """Represents a filesystem. + + :ivar name: The filesystem name. + :vartype name: str + :ivar last_modified: The last modified time. + :vartype last_modified: str + :ivar e_tag: The entity tag. + :vartype e_tag: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The filesystem name.""" + last_modified: Optional[str] = rest_field( + name="lastModified", visibility=["read", "create", "update", "delete", "query"] + ) + """The last modified time.""" + e_tag: Optional[str] = rest_field(name="eTag", visibility=["read", "create", "update", "delete", "query"]) + """The entity tag.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + last_modified: Optional[str] = None, + e_tag: Optional[str] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileSystemList(_Model): + """A list of filesystems. + + :ivar filesystems: The list of filesystems. + :vartype filesystems: list[~azure.storage.filedatalake._generated.models.FileSystemItem] + """ + + filesystems: Optional[list["_models.FileSystemItem"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The list of filesystems.""" + + @overload + def __init__( + self, + *, + filesystems: Optional[list["_models.FileSystemItem"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ListBlobsHierarchySegmentResponse(_Model): + """An enumeration of blobs. + + :ivar service_endpoint: The service endpoint. Required. + :vartype service_endpoint: str + :ivar container_name: The container name. Required. + :vartype container_name: str + :ivar prefix: The prefix filter. + :vartype prefix: str + :ivar marker: The marker for pagination. + :vartype marker: str + :ivar max_results: The maximum number of results. + :vartype max_results: int + :ivar delimiter: The delimiter used for hierarchy. + :vartype delimiter: str + :ivar segment: The blob segment. Required. + :vartype segment: ~azure.storage.filedatalake._generated.models.BlobHierarchyListSegment + :ivar next_marker: The next marker for pagination. 
+ :vartype next_marker: str + """ + + service_endpoint: str = rest_field( + name="serviceEndpoint", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": True, "name": "ServiceEndpoint", "text": False, "unwrapped": False}, + ) + """The service endpoint. Required.""" + container_name: str = rest_field( + name="containerName", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": True, "name": "ContainerName", "text": False, "unwrapped": False}, + ) + """The container name. Required.""" + prefix: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Prefix", "text": False, "unwrapped": False}, + ) + """The prefix filter.""" + marker: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Marker", "text": False, "unwrapped": False}, + ) + """The marker for pagination.""" + max_results: Optional[int] = rest_field( + name="maxResults", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "MaxResults", "text": False, "unwrapped": False}, + ) + """The maximum number of results.""" + delimiter: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Delimiter", "text": False, "unwrapped": False}, + ) + """The delimiter used for hierarchy.""" + segment: "_models.BlobHierarchyListSegment" = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Blobs", "text": False, "unwrapped": False}, + ) + """The blob segment. 
Required.""" + next_marker: Optional[str] = rest_field( + name="nextMarker", + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "NextMarker", "text": False, "unwrapped": False}, + ) + """The next marker for pagination.""" + + _xml = {"attribute": False, "name": "EnumerationResults", "text": False, "unwrapped": False} + + @overload + def __init__( + self, + *, + service_endpoint: str, + container_name: str, + segment: "_models.BlobHierarchyListSegment", + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + delimiter: Optional[str] = None, + next_marker: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PathItem(_Model): + """Represents a path in a filesystem. + + :ivar name: The path name. + :vartype name: str + :ivar is_directory: Whether the path is a directory. + :vartype is_directory: bool + :ivar last_modified: The last modified time. + :vartype last_modified: str + :ivar e_tag: The entity tag. + :vartype e_tag: str + :ivar content_length: The content length. + :vartype content_length: int + :ivar owner: The owner of the path. + :vartype owner: str + :ivar group: The owning group of the path. + :vartype group: str + :ivar permissions: The POSIX access permissions. + :vartype permissions: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar creation_time: The creation time. + :vartype creation_time: str + :ivar expiry_time: The expiry time. + :vartype expiry_time: str + :ivar encryption_context: The encryption context. 
+ :vartype encryption_context: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The path name.""" + is_directory: Optional[bool] = rest_field( + name="isDirectory", visibility=["read", "create", "update", "delete", "query"] + ) + """Whether the path is a directory.""" + last_modified: Optional[str] = rest_field( + name="lastModified", visibility=["read", "create", "update", "delete", "query"] + ) + """The last modified time.""" + e_tag: Optional[str] = rest_field(name="eTag", visibility=["read", "create", "update", "delete", "query"]) + """The entity tag.""" + content_length: Optional[int] = rest_field( + name="contentLength", visibility=["read", "create", "update", "delete", "query"] + ) + """The content length.""" + owner: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The owner of the path.""" + group: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The owning group of the path.""" + permissions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The POSIX access permissions.""" + encryption_scope: Optional[str] = rest_field( + name="encryptionScope", visibility=["read", "create", "update", "delete", "query"] + ) + """The name of the encryption scope under which the blob is encrypted.""" + creation_time: Optional[str] = rest_field( + name="creationTime", visibility=["read", "create", "update", "delete", "query"] + ) + """The creation time.""" + expiry_time: Optional[str] = rest_field( + name="expiryTime", visibility=["read", "create", "update", "delete", "query"] + ) + """The expiry time.""" + encryption_context: Optional[str] = rest_field( + name="EncryptionContext", visibility=["read", "create", "update", "delete", "query"] + ) + """The encryption context.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + is_directory: Optional[bool] = None, + 
last_modified: Optional[str] = None, + e_tag: Optional[str] = None, + content_length: Optional[int] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + permissions: Optional[str] = None, + encryption_scope: Optional[str] = None, + creation_time: Optional[str] = None, + expiry_time: Optional[str] = None, + encryption_context: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PathList(_Model): + """A list of paths. + + :ivar paths: The list of paths. + :vartype paths: list[~azure.storage.filedatalake._generated.models.PathItem] + """ + + paths: Optional[list["_models.PathItem"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The list of paths.""" + + @overload + def __init__( + self, + *, + paths: Optional[list["_models.PathItem"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SetAccessControlRecursiveResponse(_Model): + """The response for set access control recursive operations. + + :ivar directories_successful: The number of directories successfully processed. + :vartype directories_successful: int + :ivar files_successful: The number of files successfully processed. + :vartype files_successful: int + :ivar failure_count: The number of failures. + :vartype failure_count: int + :ivar failed_entries: The list of failed entries. 
+ :vartype failed_entries: list[~azure.storage.filedatalake._generated.models.AclFailedEntry] + """ + + directories_successful: Optional[int] = rest_field( + name="directoriesSuccessful", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of directories successfully processed.""" + files_successful: Optional[int] = rest_field( + name="filesSuccessful", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of files successfully processed.""" + failure_count: Optional[int] = rest_field( + name="failureCount", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of failures.""" + failed_entries: Optional[list["_models.AclFailedEntry"]] = rest_field( + name="failedEntries", visibility=["read", "create", "update", "delete", "query"] + ) + """The list of failed entries.""" + + @overload + def __init__( + self, + *, + directories_successful: Optional[int] = None, + files_successful: Optional[int] = None, + failure_count: Optional[int] = None, + failed_entries: Optional[list["_models.AclFailedEntry"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageError(_Model): + """The storage error response. + + :ivar error: The service error response object. + :vartype error: ~azure.storage.filedatalake._generated.models.StorageErrorBody + """ + + error: Optional["_models.StorageErrorBody"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The service error response object.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.StorageErrorBody"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StorageErrorBody(_Model): + """The service error response body. + + :ivar code: The service error code. + :vartype code: str + :ivar message: The service error message. + :vartype message: str + """ + + code: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Code", "text": False, "unwrapped": False}, + ) + """The service error code.""" + message: Optional[str] = rest_field( + visibility=["read", "create", "update", "delete", "query"], + xml={"attribute": False, "name": "Message", "text": False, "unwrapped": False}, + ) + """The service error message.""" + + @overload + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models_py3.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models_py3.py deleted file mode 100644 index e9422fbe63d7..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_models_py3.py +++ /dev/null @@ -1,1041 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -import datetime -from typing import Any, Literal, Optional, TYPE_CHECKING - -from .._utils import serialization as _serialization - -if TYPE_CHECKING: - from .. import models as _models - - -class AclFailedEntry(_serialization.Model): - """AclFailedEntry. - - :ivar name: - :vartype name: str - :ivar type: - :vartype type: str - :ivar error_message: - :vartype error_message: str - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "error_message": {"key": "errorMessage", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - type: Optional[str] = None, - error_message: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: - :paramtype name: str - :keyword type: - :paramtype type: str - :keyword error_message: - :paramtype error_message: str - """ - super().__init__(**kwargs) - self.name = name - self.type = type - self.error_message = error_message - - -class BlobHierarchyListSegment(_serialization.Model): - """BlobHierarchyListSegment. - - All required parameters must be populated in order to send to server. - - :ivar blob_prefixes: - :vartype blob_prefixes: list[~azure.storage.filedatalake.models.BlobPrefix] - :ivar blob_items: Required. 
- :vartype blob_items: list[~azure.storage.filedatalake.models.BlobItemInternal] - """ - - _validation = { - "blob_items": {"required": True}, - } - - _attribute_map = { - "blob_prefixes": {"key": "BlobPrefixes", "type": "[BlobPrefix]"}, - "blob_items": {"key": "BlobItems", "type": "[BlobItemInternal]", "xml": {"itemsName": "Blob"}}, - } - _xml_map = {"name": "Blobs"} - - def __init__( - self, - *, - blob_items: list["_models.BlobItemInternal"], - blob_prefixes: Optional[list["_models.BlobPrefix"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword blob_prefixes: - :paramtype blob_prefixes: list[~azure.storage.filedatalake.models.BlobPrefix] - :keyword blob_items: Required. - :paramtype blob_items: list[~azure.storage.filedatalake.models.BlobItemInternal] - """ - super().__init__(**kwargs) - self.blob_prefixes = blob_prefixes - self.blob_items = blob_items - - -class BlobItemInternal(_serialization.Model): - """An Azure Storage blob. - - All required parameters must be populated in order to send to server. - - :ivar name: Required. - :vartype name: str - :ivar deleted: Required. - :vartype deleted: bool - :ivar snapshot: Required. - :vartype snapshot: str - :ivar version_id: - :vartype version_id: str - :ivar is_current_version: - :vartype is_current_version: bool - :ivar properties: Properties of a blob. Required. 
- :vartype properties: ~azure.storage.filedatalake.models.BlobPropertiesInternal - :ivar deletion_id: - :vartype deletion_id: str - """ - - _validation = { - "name": {"required": True}, - "deleted": {"required": True}, - "snapshot": {"required": True}, - "properties": {"required": True}, - } - - _attribute_map = { - "name": {"key": "Name", "type": "str"}, - "deleted": {"key": "Deleted", "type": "bool"}, - "snapshot": {"key": "Snapshot", "type": "str"}, - "version_id": {"key": "VersionId", "type": "str"}, - "is_current_version": {"key": "IsCurrentVersion", "type": "bool"}, - "properties": {"key": "Properties", "type": "BlobPropertiesInternal"}, - "deletion_id": {"key": "DeletionId", "type": "str"}, - } - _xml_map = {"name": "Blob"} - - def __init__( - self, - *, - name: str, - deleted: bool, - snapshot: str, - properties: "_models.BlobPropertiesInternal", - version_id: Optional[str] = None, - is_current_version: Optional[bool] = None, - deletion_id: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: Required. - :paramtype name: str - :keyword deleted: Required. - :paramtype deleted: bool - :keyword snapshot: Required. - :paramtype snapshot: str - :keyword version_id: - :paramtype version_id: str - :keyword is_current_version: - :paramtype is_current_version: bool - :keyword properties: Properties of a blob. Required. - :paramtype properties: ~azure.storage.filedatalake.models.BlobPropertiesInternal - :keyword deletion_id: - :paramtype deletion_id: str - """ - super().__init__(**kwargs) - self.name = name - self.deleted = deleted - self.snapshot = snapshot - self.version_id = version_id - self.is_current_version = is_current_version - self.properties = properties - self.deletion_id = deletion_id - - -class BlobPrefix(_serialization.Model): - """BlobPrefix. - - All required parameters must be populated in order to send to server. - - :ivar name: Required. 
- :vartype name: str - """ - - _validation = { - "name": {"required": True}, - } - - _attribute_map = { - "name": {"key": "Name", "type": "str"}, - } - - def __init__(self, *, name: str, **kwargs: Any) -> None: - """ - :keyword name: Required. - :paramtype name: str - """ - super().__init__(**kwargs) - self.name = name - - -class BlobPropertiesInternal(_serialization.Model): - """Properties of a blob. - - All required parameters must be populated in order to send to server. - - :ivar creation_time: - :vartype creation_time: ~datetime.datetime - :ivar last_modified: Required. - :vartype last_modified: ~datetime.datetime - :ivar etag: Required. - :vartype etag: str - :ivar content_length: Size in bytes. - :vartype content_length: int - :ivar content_type: - :vartype content_type: str - :ivar content_encoding: - :vartype content_encoding: str - :ivar content_language: - :vartype content_language: str - :ivar content_md5: - :vartype content_md5: bytes - :ivar content_disposition: - :vartype content_disposition: str - :ivar cache_control: - :vartype cache_control: str - :ivar blob_sequence_number: - :vartype blob_sequence_number: int - :ivar copy_id: - :vartype copy_id: str - :ivar copy_source: - :vartype copy_source: str - :ivar copy_progress: - :vartype copy_progress: str - :ivar copy_completion_time: - :vartype copy_completion_time: ~datetime.datetime - :ivar copy_status_description: - :vartype copy_status_description: str - :ivar server_encrypted: - :vartype server_encrypted: bool - :ivar incremental_copy: - :vartype incremental_copy: bool - :ivar destination_snapshot: - :vartype destination_snapshot: str - :ivar deleted_time: - :vartype deleted_time: ~datetime.datetime - :ivar remaining_retention_days: - :vartype remaining_retention_days: int - :ivar access_tier_inferred: - :vartype access_tier_inferred: bool - :ivar customer_provided_key_sha256: - :vartype customer_provided_key_sha256: str - :ivar encryption_scope: The name of the encryption scope under which the 
blob is encrypted. - :vartype encryption_scope: str - :ivar access_tier_change_time: - :vartype access_tier_change_time: ~datetime.datetime - :ivar tag_count: - :vartype tag_count: int - :ivar expires_on: - :vartype expires_on: ~datetime.datetime - :ivar is_sealed: - :vartype is_sealed: bool - :ivar last_accessed_on: - :vartype last_accessed_on: ~datetime.datetime - :ivar delete_time: - :vartype delete_time: ~datetime.datetime - """ - - _validation = { - "last_modified": {"required": True}, - "etag": {"required": True}, - } - - _attribute_map = { - "creation_time": {"key": "Creation-Time", "type": "rfc-1123"}, - "last_modified": {"key": "Last-Modified", "type": "rfc-1123"}, - "etag": {"key": "Etag", "type": "str"}, - "content_length": {"key": "Content-Length", "type": "int"}, - "content_type": {"key": "Content-Type", "type": "str"}, - "content_encoding": {"key": "Content-Encoding", "type": "str"}, - "content_language": {"key": "Content-Language", "type": "str"}, - "content_md5": {"key": "Content-MD5", "type": "bytearray"}, - "content_disposition": {"key": "Content-Disposition", "type": "str"}, - "cache_control": {"key": "Cache-Control", "type": "str"}, - "blob_sequence_number": {"key": "x-ms-blob-sequence-number", "type": "int"}, - "copy_id": {"key": "CopyId", "type": "str"}, - "copy_source": {"key": "CopySource", "type": "str"}, - "copy_progress": {"key": "CopyProgress", "type": "str"}, - "copy_completion_time": {"key": "CopyCompletionTime", "type": "rfc-1123"}, - "copy_status_description": {"key": "CopyStatusDescription", "type": "str"}, - "server_encrypted": {"key": "ServerEncrypted", "type": "bool"}, - "incremental_copy": {"key": "IncrementalCopy", "type": "bool"}, - "destination_snapshot": {"key": "DestinationSnapshot", "type": "str"}, - "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"}, - "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"}, - "access_tier_inferred": {"key": "AccessTierInferred", "type": "bool"}, - 
"customer_provided_key_sha256": {"key": "CustomerProvidedKeySha256", "type": "str"}, - "encryption_scope": {"key": "EncryptionScope", "type": "str"}, - "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"}, - "tag_count": {"key": "TagCount", "type": "int"}, - "expires_on": {"key": "Expiry-Time", "type": "rfc-1123"}, - "is_sealed": {"key": "Sealed", "type": "bool"}, - "last_accessed_on": {"key": "LastAccessTime", "type": "rfc-1123"}, - "delete_time": {"key": "DeleteTime", "type": "rfc-1123"}, - } - _xml_map = {"name": "Properties"} - - def __init__( # pylint: disable=too-many-locals - self, - *, - last_modified: datetime.datetime, - etag: str, - creation_time: Optional[datetime.datetime] = None, - content_length: Optional[int] = None, - content_type: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_md5: Optional[bytes] = None, - content_disposition: Optional[str] = None, - cache_control: Optional[str] = None, - blob_sequence_number: Optional[int] = None, - copy_id: Optional[str] = None, - copy_source: Optional[str] = None, - copy_progress: Optional[str] = None, - copy_completion_time: Optional[datetime.datetime] = None, - copy_status_description: Optional[str] = None, - server_encrypted: Optional[bool] = None, - incremental_copy: Optional[bool] = None, - destination_snapshot: Optional[str] = None, - deleted_time: Optional[datetime.datetime] = None, - remaining_retention_days: Optional[int] = None, - access_tier_inferred: Optional[bool] = None, - customer_provided_key_sha256: Optional[str] = None, - encryption_scope: Optional[str] = None, - access_tier_change_time: Optional[datetime.datetime] = None, - tag_count: Optional[int] = None, - expires_on: Optional[datetime.datetime] = None, - is_sealed: Optional[bool] = None, - last_accessed_on: Optional[datetime.datetime] = None, - delete_time: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword 
creation_time: - :paramtype creation_time: ~datetime.datetime - :keyword last_modified: Required. - :paramtype last_modified: ~datetime.datetime - :keyword etag: Required. - :paramtype etag: str - :keyword content_length: Size in bytes. - :paramtype content_length: int - :keyword content_type: - :paramtype content_type: str - :keyword content_encoding: - :paramtype content_encoding: str - :keyword content_language: - :paramtype content_language: str - :keyword content_md5: - :paramtype content_md5: bytes - :keyword content_disposition: - :paramtype content_disposition: str - :keyword cache_control: - :paramtype cache_control: str - :keyword blob_sequence_number: - :paramtype blob_sequence_number: int - :keyword copy_id: - :paramtype copy_id: str - :keyword copy_source: - :paramtype copy_source: str - :keyword copy_progress: - :paramtype copy_progress: str - :keyword copy_completion_time: - :paramtype copy_completion_time: ~datetime.datetime - :keyword copy_status_description: - :paramtype copy_status_description: str - :keyword server_encrypted: - :paramtype server_encrypted: bool - :keyword incremental_copy: - :paramtype incremental_copy: bool - :keyword destination_snapshot: - :paramtype destination_snapshot: str - :keyword deleted_time: - :paramtype deleted_time: ~datetime.datetime - :keyword remaining_retention_days: - :paramtype remaining_retention_days: int - :keyword access_tier_inferred: - :paramtype access_tier_inferred: bool - :keyword customer_provided_key_sha256: - :paramtype customer_provided_key_sha256: str - :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. 
- :paramtype encryption_scope: str - :keyword access_tier_change_time: - :paramtype access_tier_change_time: ~datetime.datetime - :keyword tag_count: - :paramtype tag_count: int - :keyword expires_on: - :paramtype expires_on: ~datetime.datetime - :keyword is_sealed: - :paramtype is_sealed: bool - :keyword last_accessed_on: - :paramtype last_accessed_on: ~datetime.datetime - :keyword delete_time: - :paramtype delete_time: ~datetime.datetime - """ - super().__init__(**kwargs) - self.creation_time = creation_time - self.last_modified = last_modified - self.etag = etag - self.content_length = content_length - self.content_type = content_type - self.content_encoding = content_encoding - self.content_language = content_language - self.content_md5 = content_md5 - self.content_disposition = content_disposition - self.cache_control = cache_control - self.blob_sequence_number = blob_sequence_number - self.copy_id = copy_id - self.copy_source = copy_source - self.copy_progress = copy_progress - self.copy_completion_time = copy_completion_time - self.copy_status_description = copy_status_description - self.server_encrypted = server_encrypted - self.incremental_copy = incremental_copy - self.destination_snapshot = destination_snapshot - self.deleted_time = deleted_time - self.remaining_retention_days = remaining_retention_days - self.access_tier_inferred = access_tier_inferred - self.customer_provided_key_sha256 = customer_provided_key_sha256 - self.encryption_scope = encryption_scope - self.access_tier_change_time = access_tier_change_time - self.tag_count = tag_count - self.expires_on = expires_on - self.is_sealed = is_sealed - self.last_accessed_on = last_accessed_on - self.delete_time = delete_time - - -class CpkInfo(_serialization.Model): - """Parameter group. - - :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data - provided in the request. If not specified, encryption is performed with the root account - encryption key. 
For more information, see Encryption at Rest for Azure Storage Services. - :vartype encryption_key: str - :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided - if the x-ms-encryption-key header is provided. - :vartype encryption_key_sha256: str - :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently, - the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is - provided. Default value is "AES256". - :vartype encryption_algorithm: str - """ - - _attribute_map = { - "encryption_key": {"key": "encryptionKey", "type": "str"}, - "encryption_key_sha256": {"key": "encryptionKeySha256", "type": "str"}, - "encryption_algorithm": {"key": "encryptionAlgorithm", "type": "str"}, - } - - def __init__( - self, - *, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Optional[Literal["AES256"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data - provided in the request. If not specified, encryption is performed with the root account - encryption key. For more information, see Encryption at Rest for Azure Storage Services. - :paramtype encryption_key: str - :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be - provided if the x-ms-encryption-key header is provided. - :paramtype encryption_key_sha256: str - :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. - Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key - header is provided. Default value is "AES256". 
- :paramtype encryption_algorithm: str - """ - super().__init__(**kwargs) - self.encryption_key = encryption_key - self.encryption_key_sha256 = encryption_key_sha256 - self.encryption_algorithm = encryption_algorithm - - -class FileSystem(_serialization.Model): - """FileSystem. - - :ivar name: - :vartype name: str - :ivar last_modified: - :vartype last_modified: str - :ivar e_tag: - :vartype e_tag: str - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "last_modified": {"key": "lastModified", "type": "str"}, - "e_tag": {"key": "eTag", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - last_modified: Optional[str] = None, - e_tag: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: - :paramtype name: str - :keyword last_modified: - :paramtype last_modified: str - :keyword e_tag: - :paramtype e_tag: str - """ - super().__init__(**kwargs) - self.name = name - self.last_modified = last_modified - self.e_tag = e_tag - - -class FileSystemList(_serialization.Model): - """FileSystemList. - - :ivar filesystems: - :vartype filesystems: list[~azure.storage.filedatalake.models.FileSystem] - """ - - _attribute_map = { - "filesystems": {"key": "filesystems", "type": "[FileSystem]"}, - } - - def __init__(self, *, filesystems: Optional[list["_models.FileSystem"]] = None, **kwargs: Any) -> None: - """ - :keyword filesystems: - :paramtype filesystems: list[~azure.storage.filedatalake.models.FileSystem] - """ - super().__init__(**kwargs) - self.filesystems = filesystems - - -class LeaseAccessConditions(_serialization.Model): - """Parameter group. - - :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and - matches this ID. 
- :vartype lease_id: str - """ - - _attribute_map = { - "lease_id": {"key": "leaseId", "type": "str"}, - } - - def __init__(self, *, lease_id: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active - and matches this ID. - :paramtype lease_id: str - """ - super().__init__(**kwargs) - self.lease_id = lease_id - - -class ListBlobsHierarchySegmentResponse(_serialization.Model): - """An enumeration of blobs. - - All required parameters must be populated in order to send to server. - - :ivar service_endpoint: Required. - :vartype service_endpoint: str - :ivar container_name: Required. - :vartype container_name: str - :ivar prefix: - :vartype prefix: str - :ivar marker: - :vartype marker: str - :ivar max_results: - :vartype max_results: int - :ivar delimiter: - :vartype delimiter: str - :ivar segment: Required. - :vartype segment: ~azure.storage.filedatalake.models.BlobHierarchyListSegment - :ivar next_marker: - :vartype next_marker: str - """ - - _validation = { - "service_endpoint": {"required": True}, - "container_name": {"required": True}, - "segment": {"required": True}, - } - - _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, - "container_name": {"key": "ContainerName", "type": "str", "xml": {"attr": True}}, - "prefix": {"key": "Prefix", "type": "str"}, - "marker": {"key": "Marker", "type": "str"}, - "max_results": {"key": "MaxResults", "type": "int"}, - "delimiter": {"key": "Delimiter", "type": "str"}, - "segment": {"key": "Segment", "type": "BlobHierarchyListSegment"}, - "next_marker": {"key": "NextMarker", "type": "str"}, - } - _xml_map = {"name": "EnumerationResults"} - - def __init__( - self, - *, - service_endpoint: str, - container_name: str, - segment: "_models.BlobHierarchyListSegment", - prefix: Optional[str] = None, - marker: Optional[str] = None, - max_results: Optional[int] = None, - delimiter: 
Optional[str] = None, - next_marker: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword service_endpoint: Required. - :paramtype service_endpoint: str - :keyword container_name: Required. - :paramtype container_name: str - :keyword prefix: - :paramtype prefix: str - :keyword marker: - :paramtype marker: str - :keyword max_results: - :paramtype max_results: int - :keyword delimiter: - :paramtype delimiter: str - :keyword segment: Required. - :paramtype segment: ~azure.storage.filedatalake.models.BlobHierarchyListSegment - :keyword next_marker: - :paramtype next_marker: str - """ - super().__init__(**kwargs) - self.service_endpoint = service_endpoint - self.container_name = container_name - self.prefix = prefix - self.marker = marker - self.max_results = max_results - self.delimiter = delimiter - self.segment = segment - self.next_marker = next_marker - - -class ModifiedAccessConditions(_serialization.Model): - """Parameter group. - - :ivar if_modified_since: Specify this header value to operate only on a blob if it has been - modified since the specified date/time. - :vartype if_modified_since: ~datetime.datetime - :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not - been modified since the specified date/time. - :vartype if_unmodified_since: ~datetime.datetime - :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. - :vartype if_match: str - :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. 
- :vartype if_none_match: str - """ - - _attribute_map = { - "if_modified_since": {"key": "ifModifiedSince", "type": "rfc-1123"}, - "if_unmodified_since": {"key": "ifUnmodifiedSince", "type": "rfc-1123"}, - "if_match": {"key": "ifMatch", "type": "str"}, - "if_none_match": {"key": "ifNoneMatch", "type": "str"}, - } - - def __init__( - self, - *, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword if_modified_since: Specify this header value to operate only on a blob if it has been - modified since the specified date/time. - :paramtype if_modified_since: ~datetime.datetime - :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not - been modified since the specified date/time. - :paramtype if_unmodified_since: ~datetime.datetime - :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. - :paramtype if_match: str - :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching - value. - :paramtype if_none_match: str - """ - super().__init__(**kwargs) - self.if_modified_since = if_modified_since - self.if_unmodified_since = if_unmodified_since - self.if_match = if_match - self.if_none_match = if_none_match - - -class Path(_serialization.Model): - """Path. - - :ivar name: - :vartype name: str - :ivar is_directory: - :vartype is_directory: bool - :ivar last_modified: - :vartype last_modified: str - :ivar e_tag: - :vartype e_tag: str - :ivar content_length: - :vartype content_length: int - :ivar owner: - :vartype owner: str - :ivar group: - :vartype group: str - :ivar permissions: - :vartype permissions: str - :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. 
- :vartype encryption_scope: str - :ivar creation_time: - :vartype creation_time: str - :ivar expiry_time: - :vartype expiry_time: str - :ivar encryption_context: - :vartype encryption_context: str - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "is_directory": {"key": "isDirectory", "type": "bool"}, - "last_modified": {"key": "lastModified", "type": "str"}, - "e_tag": {"key": "eTag", "type": "str"}, - "content_length": {"key": "contentLength", "type": "int"}, - "owner": {"key": "owner", "type": "str"}, - "group": {"key": "group", "type": "str"}, - "permissions": {"key": "permissions", "type": "str"}, - "encryption_scope": {"key": "EncryptionScope", "type": "str"}, - "creation_time": {"key": "creationTime", "type": "str"}, - "expiry_time": {"key": "expiryTime", "type": "str"}, - "encryption_context": {"key": "EncryptionContext", "type": "str"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - is_directory: bool = False, - last_modified: Optional[str] = None, - e_tag: Optional[str] = None, - content_length: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - encryption_scope: Optional[str] = None, - creation_time: Optional[str] = None, - expiry_time: Optional[str] = None, - encryption_context: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: - :paramtype name: str - :keyword is_directory: - :paramtype is_directory: bool - :keyword last_modified: - :paramtype last_modified: str - :keyword e_tag: - :paramtype e_tag: str - :keyword content_length: - :paramtype content_length: int - :keyword owner: - :paramtype owner: str - :keyword group: - :paramtype group: str - :keyword permissions: - :paramtype permissions: str - :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. 
- :paramtype encryption_scope: str - :keyword creation_time: - :paramtype creation_time: str - :keyword expiry_time: - :paramtype expiry_time: str - :keyword encryption_context: - :paramtype encryption_context: str - """ - super().__init__(**kwargs) - self.name = name - self.is_directory = is_directory - self.last_modified = last_modified - self.e_tag = e_tag - self.content_length = content_length - self.owner = owner - self.group = group - self.permissions = permissions - self.encryption_scope = encryption_scope - self.creation_time = creation_time - self.expiry_time = expiry_time - self.encryption_context = encryption_context - - -class PathHTTPHeaders(_serialization.Model): - """Parameter group. - - :ivar cache_control: Optional. Sets the blob's cache control. If specified, this property is - stored with the blob and returned with a read request. - :vartype cache_control: str - :ivar content_encoding: Optional. Sets the blob's content encoding. If specified, this property - is stored with the blob and returned with a read request. - :vartype content_encoding: str - :ivar content_language: Optional. Set the blob's content language. If specified, this property - is stored with the blob and returned with a read request. - :vartype content_language: str - :ivar content_disposition: Optional. Sets the blob's Content-Disposition header. - :vartype content_disposition: str - :ivar content_type: Optional. Sets the blob's content type. If specified, this property is - stored with the blob and returned with a read request. - :vartype content_type: str - :ivar content_md5: Specify the transactional md5 for the body, to be validated by the service. - :vartype content_md5: bytes - :ivar transactional_content_hash: Specify the transactional md5 for the body, to be validated - by the service. 
- :vartype transactional_content_hash: bytes - """ - - _attribute_map = { - "cache_control": {"key": "cacheControl", "type": "str"}, - "content_encoding": {"key": "contentEncoding", "type": "str"}, - "content_language": {"key": "contentLanguage", "type": "str"}, - "content_disposition": {"key": "contentDisposition", "type": "str"}, - "content_type": {"key": "contentType", "type": "str"}, - "content_md5": {"key": "contentMD5", "type": "bytearray"}, - "transactional_content_hash": {"key": "transactionalContentHash", "type": "bytearray"}, - } - - def __init__( - self, - *, - cache_control: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_disposition: Optional[str] = None, - content_type: Optional[str] = None, - content_md5: Optional[bytes] = None, - transactional_content_hash: Optional[bytes] = None, - **kwargs: Any - ) -> None: - """ - :keyword cache_control: Optional. Sets the blob's cache control. If specified, this property is - stored with the blob and returned with a read request. - :paramtype cache_control: str - :keyword content_encoding: Optional. Sets the blob's content encoding. If specified, this - property is stored with the blob and returned with a read request. - :paramtype content_encoding: str - :keyword content_language: Optional. Set the blob's content language. If specified, this - property is stored with the blob and returned with a read request. - :paramtype content_language: str - :keyword content_disposition: Optional. Sets the blob's Content-Disposition header. - :paramtype content_disposition: str - :keyword content_type: Optional. Sets the blob's content type. If specified, this property is - stored with the blob and returned with a read request. - :paramtype content_type: str - :keyword content_md5: Specify the transactional md5 for the body, to be validated by the - service. 
- :paramtype content_md5: bytes - :keyword transactional_content_hash: Specify the transactional md5 for the body, to be - validated by the service. - :paramtype transactional_content_hash: bytes - """ - super().__init__(**kwargs) - self.cache_control = cache_control - self.content_encoding = content_encoding - self.content_language = content_language - self.content_disposition = content_disposition - self.content_type = content_type - self.content_md5 = content_md5 - self.transactional_content_hash = transactional_content_hash - - -class PathList(_serialization.Model): - """PathList. - - :ivar paths: - :vartype paths: list[~azure.storage.filedatalake.models.Path] - """ - - _attribute_map = { - "paths": {"key": "paths", "type": "[Path]"}, - } - - def __init__(self, *, paths: Optional[list["_models.Path"]] = None, **kwargs: Any) -> None: - """ - :keyword paths: - :paramtype paths: list[~azure.storage.filedatalake.models.Path] - """ - super().__init__(**kwargs) - self.paths = paths - - -class SetAccessControlRecursiveResponse(_serialization.Model): - """SetAccessControlRecursiveResponse. 
- - :ivar directories_successful: - :vartype directories_successful: int - :ivar files_successful: - :vartype files_successful: int - :ivar failure_count: - :vartype failure_count: int - :ivar failed_entries: - :vartype failed_entries: list[~azure.storage.filedatalake.models.AclFailedEntry] - """ - - _attribute_map = { - "directories_successful": {"key": "directoriesSuccessful", "type": "int"}, - "files_successful": {"key": "filesSuccessful", "type": "int"}, - "failure_count": {"key": "failureCount", "type": "int"}, - "failed_entries": {"key": "failedEntries", "type": "[AclFailedEntry]"}, - } - - def __init__( - self, - *, - directories_successful: Optional[int] = None, - files_successful: Optional[int] = None, - failure_count: Optional[int] = None, - failed_entries: Optional[list["_models.AclFailedEntry"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword directories_successful: - :paramtype directories_successful: int - :keyword files_successful: - :paramtype files_successful: int - :keyword failure_count: - :paramtype failure_count: int - :keyword failed_entries: - :paramtype failed_entries: list[~azure.storage.filedatalake.models.AclFailedEntry] - """ - super().__init__(**kwargs) - self.directories_successful = directories_successful - self.files_successful = files_successful - self.failure_count = failure_count - self.failed_entries = failed_entries - - -class SourceModifiedAccessConditions(_serialization.Model): - """Parameter group. - - :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :vartype source_if_match: str - :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching - value. - :vartype source_if_none_match: str - :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has - been modified since the specified date/time. 
- :vartype source_if_modified_since: ~datetime.datetime - :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has - not been modified since the specified date/time. - :vartype source_if_unmodified_since: ~datetime.datetime - """ - - _attribute_map = { - "source_if_match": {"key": "sourceIfMatch", "type": "str"}, - "source_if_none_match": {"key": "sourceIfNoneMatch", "type": "str"}, - "source_if_modified_since": {"key": "sourceIfModifiedSince", "type": "rfc-1123"}, - "source_if_unmodified_since": {"key": "sourceIfUnmodifiedSince", "type": "rfc-1123"}, - } - - def __init__( - self, - *, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - **kwargs: Any - ) -> None: - """ - :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. - :paramtype source_if_match: str - :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a - matching value. - :paramtype source_if_none_match: str - :keyword source_if_modified_since: Specify this header value to operate only on a blob if it - has been modified since the specified date/time. - :paramtype source_if_modified_since: ~datetime.datetime - :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it - has not been modified since the specified date/time. - :paramtype source_if_unmodified_since: ~datetime.datetime - """ - super().__init__(**kwargs) - self.source_if_match = source_if_match - self.source_if_none_match = source_if_none_match - self.source_if_modified_since = source_if_modified_since - self.source_if_unmodified_since = source_if_unmodified_since - - -class StorageError(_serialization.Model): - """StorageError. - - :ivar error: The service error response object. 
- :vartype error: ~azure.storage.filedatalake.models.StorageErrorError - """ - - _attribute_map = { - "error": {"key": "error", "type": "StorageErrorError"}, - } - - def __init__(self, *, error: Optional["_models.StorageErrorError"] = None, **kwargs: Any) -> None: - """ - :keyword error: The service error response object. - :paramtype error: ~azure.storage.filedatalake.models.StorageErrorError - """ - super().__init__(**kwargs) - self.error = error - - -class StorageErrorError(_serialization.Model): - """The service error response object. - - :ivar code: The service error code. - :vartype code: str - :ivar message: The service error message. - :vartype message: str - """ - - _attribute_map = { - "code": {"key": "Code", "type": "str"}, - "message": {"key": "Message", "type": "str"}, - } - - def __init__(self, *, code: Optional[str] = None, message: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword code: The service error code. - :paramtype code: str - :keyword message: The service error message. - :paramtype message: str - """ - super().__init__(**kwargs) - self.code = code - self.message = message diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_patch.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_patch.py index f7dd32510333..17d186403884 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_patch.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_patch.py @@ -1,14 +1,75 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List +import datetime +from typing import Optional -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level +from .._utils.model_base import Model as _Model, rest_field +from ._models import PathItem + +_ALL_VISIBILITY = ["read", "create", "update", "delete", "query"] + + +class CpkInfo(_Model): + encryption_key: Optional[str] = rest_field(name="encryption_key", visibility=_ALL_VISIBILITY) + encryption_key_sha256: Optional[str] = rest_field(name="encryption_key_sha256", visibility=_ALL_VISIBILITY) + encryption_algorithm: Optional[str] = rest_field(name="encryption_algorithm", visibility=_ALL_VISIBILITY) + + +class LeaseAccessConditions(_Model): + lease_id: Optional[str] = rest_field(name="lease_id", visibility=_ALL_VISIBILITY) + + +class ModifiedAccessConditions(_Model): + if_modified_since: Optional[datetime.datetime] = rest_field(name="if_modified_since", visibility=_ALL_VISIBILITY) + if_unmodified_since: Optional[datetime.datetime] = rest_field( + name="if_unmodified_since", visibility=_ALL_VISIBILITY + ) + if_match: Optional[str] = rest_field(name="if_match", visibility=_ALL_VISIBILITY) + if_none_match: Optional[str] = rest_field(name="if_none_match", visibility=_ALL_VISIBILITY) + + +class PathHTTPHeaders(_Model): + cache_control: Optional[str] = rest_field(name="cache_control", visibility=_ALL_VISIBILITY) + content_encoding: Optional[str] = rest_field(name="content_encoding", visibility=_ALL_VISIBILITY) + content_language: Optional[str] = rest_field(name="content_language", visibility=_ALL_VISIBILITY) + content_disposition: Optional[str] = rest_field(name="content_disposition", visibility=_ALL_VISIBILITY) + content_type: Optional[str] = rest_field(name="content_type", 
visibility=_ALL_VISIBILITY) + content_md5: Optional[bytes] = rest_field(name="content_md5", visibility=_ALL_VISIBILITY) + transactional_content_hash: Optional[bytes] = rest_field( + name="transactional_content_hash", visibility=_ALL_VISIBILITY + ) + + +class SourceModifiedAccessConditions(_Model): + source_if_match: Optional[str] = rest_field(name="source_if_match", visibility=_ALL_VISIBILITY) + source_if_none_match: Optional[str] = rest_field(name="source_if_none_match", visibility=_ALL_VISIBILITY) + source_if_modified_since: Optional[datetime.datetime] = rest_field( + name="source_if_modified_since", visibility=_ALL_VISIBILITY + ) + source_if_unmodified_since: Optional[datetime.datetime] = rest_field( + name="source_if_unmodified_since", visibility=_ALL_VISIBILITY + ) + + +# Alias: the old generated code exported "Path"; the new code uses "PathItem". +Path = PathItem + + +__all__: list[str] = [ + "CpkInfo", + "LeaseAccessConditions", + "ModifiedAccessConditions", + "Path", + "PathHTTPHeaders", + "SourceModifiedAccessConditions", +] def patch_sdk(): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/__init__.py index 56a7ece347ab..5c80f0bb8157 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,9 +12,9 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._service_operations import ServiceOperations # type: ignore -from ._file_system_operations import FileSystemOperations # type: ignore -from ._path_operations import PathOperations # type: ignore +from ._operations import ServiceOperations # type: ignore +from ._operations import FileSystemOperations # type: ignore +from ._operations import PathOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py deleted file mode 100644 index a6bd831c6b1f..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py +++ /dev/null @@ -1,910 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -import datetime -from typing import Any, Callable, Literal, Optional, TypeVar, Union - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - properties: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["resource"] = _SERIALIZER.query("resource", resource, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", 
timeout, "int", minimum=0) - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if properties is not None: - _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_properties_request( - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - properties: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["resource"] = _SERIALIZER.query("resource", resource, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if properties is not None: - _headers["x-ms-properties"] = 
def build_get_properties_request(
    url: str, *, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
    """Build a HEAD request that retrieves all system and user-defined filesystem properties."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
    version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "{url}")
    path_format_arguments = {
        "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["resource"] = _SERIALIZER.query("resource", resource, "str")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)

    # Construct headers
    if request_id_parameter is not None:
        _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs)


def build_delete_request(
    url: str,
    *,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    if_modified_since: Optional[datetime.datetime] = None,
    if_unmodified_since: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a DELETE request that marks the filesystem for deletion; supports conditional headers."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
    version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "{url}")
    path_format_arguments = {
        "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["resource"] = _SERIALIZER.query("resource", resource, "str")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)

    # Construct headers
    if request_id_parameter is not None:
        _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    if if_modified_since is not None:
        _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
    if if_unmodified_since is not None:
        _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)


def build_list_paths_request(
    url: str,
    *,
    recursive: bool,
    request_id_parameter: Optional[str] = None,
    timeout: Optional[int] = None,
    continuation: Optional[str] = None,
    path: Optional[str] = None,
    max_results: Optional[int] = None,
    upn: Optional[bool] = None,
    begin_from: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request that lists filesystem paths and their properties.

    Note the ``path`` argument is sent as the ``directory`` query parameter.
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem"))
    version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "{url}")
    path_format_arguments = {
        "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["resource"] = _SERIALIZER.query("resource", resource, "str")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
    if continuation is not None:
        _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str")
    if path is not None:
        _params["directory"] = _SERIALIZER.query("path", path, "str")
    _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool")
    if max_results is not None:
        _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int", minimum=1)
    if upn is not None:
        _params["upn"] = _SERIALIZER.query("upn", upn, "bool")
    if begin_from is not None:
        _params["beginFrom"] = _SERIALIZER.query("begin_from", begin_from, "str")

    # Construct headers
    if request_id_parameter is not None:
        _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_list_blob_hierarchy_segment_request(  # pylint: disable=name-too-long
    url: str,
    *,
    prefix: Optional[str] = None,
    delimiter: Optional[str] = None,
    marker: Optional[str] = None,
    max_results: Optional[int] = None,
    include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None,
    showonly: Literal["deleted"] = "deleted",
    timeout: Optional[int] = None,
    request_id_parameter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request that lists blobs under the container via the Blob endpoint (XML response)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
    comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
    version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    accept = _headers.pop("Accept", "application/xml")

    # Construct URL
    _url = kwargs.pop("template_url", "{url}")
    path_format_arguments = {
        "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["restype"] = _SERIALIZER.query("restype", restype, "str")
    _params["comp"] = _SERIALIZER.query("comp", comp, "str")
    if prefix is not None:
        _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str")
    if delimiter is not None:
        _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str")
    if marker is not None:
        _params["marker"] = _SERIALIZER.query("marker", marker, "str")
    if max_results is not None:
        _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int", minimum=1)
    if include is not None:
        # Serialized as a single comma-separated query value.
        _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",")
    if showonly is not None:
        _params["showonly"] = _SERIALIZER.query("showonly", showonly, "str")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)

    # Construct headers
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    if request_id_parameter is not None:
        _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class FileSystemOperations:
    """Operations on a Data Lake filesystem.

    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.storage.filedatalake.AzureDataLakeStorageRESTAPI`'s
        :attr:`file_system` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # Pipeline plumbing is injected positionally by the generated client,
        # or by keyword when constructed directly by other generated code.
        input_args = list(args)
        self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config: AzureDataLakeStorageRESTAPIConfiguration = (
            input_args.pop(0) if input_args else kwargs.pop("config")
        )
        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def create(  # pylint: disable=inconsistent-return-statements
        self,
        request_id_parameter: Optional[str] = None,
        timeout: Optional[int] = None,
        properties: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """Create a FileSystem rooted at the specified location.

        The operation fails if the FileSystem already exists and does not
        support conditional HTTP requests.

        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :param properties: Optional. User-defined properties stored with the
         filesystem as a comma-separated list of "name=base64value" pairs
         (ISO-8859-1 characters only). Omitting the header removes all
         properties. Default value is None.
        :type properties: str
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_create_request(
            url=self._config.url,
            request_id_parameter=request_id_parameter,
            timeout=timeout,
            properties=properties,
            resource=self._config.resource,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-namespace-enabled"] = self._deserialize(
            "str", response.headers.get("x-ms-namespace-enabled")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def set_properties(  # pylint: disable=inconsistent-return-statements
        self,
        request_id_parameter: Optional[str] = None,
        timeout: Optional[int] = None,
        properties: Optional[str] = None,
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
        """Set properties for the FileSystem.

        Supports conditional HTTP requests via ``modified_access_conditions``.

        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :param properties: Optional. User-defined properties stored with the
         filesystem as a comma-separated list of "name=base64value" pairs.
         Properties not included are removed; omitting the header removes all.
         Default value is None.
        :type properties: str
        :param modified_access_conditions: Parameter group. Default value is None.
        :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        # Flatten the parameter group into the individual conditional headers.
        _if_modified_since = None
        _if_unmodified_since = None
        if modified_access_conditions is not None:
            _if_modified_since = modified_access_conditions.if_modified_since
            _if_unmodified_since = modified_access_conditions.if_unmodified_since

        _request = build_set_properties_request(
            url=self._config.url,
            request_id_parameter=request_id_parameter,
            timeout=timeout,
            properties=properties,
            if_modified_since=_if_modified_since,
            if_unmodified_since=_if_unmodified_since,
            resource=self._config.resource,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def get_properties(  # pylint: disable=inconsistent-return-statements
        self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
    ) -> None:
        """Get FileSystem properties.

        All system and user-defined filesystem properties are returned in the
        response headers (the HTTP body is empty).

        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_get_properties_request(
            url=self._config.url,
            request_id_parameter=request_id_parameter,
            timeout=timeout,
            resource=self._config.resource,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties"))
        response_headers["x-ms-namespace-enabled"] = self._deserialize(
            "str", response.headers.get("x-ms-namespace-enabled")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def delete(  # pylint: disable=inconsistent-return-statements
        self,
        request_id_parameter: Optional[str] = None,
        timeout: Optional[int] = None,
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
        """Mark the FileSystem for deletion.

        After deletion a filesystem with the same identifier cannot be created
        for at least 30 seconds; creation attempts during deletion fail with
        409 (Conflict) and other operations with 404 (Not Found). Supports
        conditional HTTP requests via ``modified_access_conditions``.

        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :param modified_access_conditions: Parameter group. Default value is None.
        :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        # Flatten the parameter group into the individual conditional headers.
        _if_modified_since = None
        _if_unmodified_since = None
        if modified_access_conditions is not None:
            _if_modified_since = modified_access_conditions.if_modified_since
            _if_unmodified_since = modified_access_conditions.if_unmodified_since

        _request = build_delete_request(
            url=self._config.url,
            request_id_parameter=request_id_parameter,
            timeout=timeout,
            if_modified_since=_if_modified_since,
            if_unmodified_since=_if_unmodified_since,
            resource=self._config.resource,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def list_paths(
        self,
        recursive: bool,
        request_id_parameter: Optional[str] = None,
        timeout: Optional[int] = None,
        continuation: Optional[str] = None,
        path: Optional[str] = None,
        max_results: Optional[int] = None,
        upn: Optional[bool] = None,
        begin_from: Optional[str] = None,
        **kwargs: Any
    ) -> _models.PathList:
        """List FileSystem paths and their properties.

        :param recursive: Required. Whether to list paths recursively.
        :type recursive: bool
        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :param continuation: Optional continuation token returned by a previous
         invocation; pass it back to continue listing. Default value is None.
        :type continuation: str
        :param path: Optional. Filters results to paths within the specified
         directory; errors if the directory does not exist. Default value is None.
        :type path: str
        :param max_results: Optional maximum number of items to return; values
         omitted or greater than 5,000 return up to 5,000 items. Default value is None.
        :type max_results: int
        :param upn: Optional (HNS accounts only). If "true", identity values in
         x-ms-owner, x-ms-group and x-ms-acl are translated from AAD Object IDs
         to User Principal Names. Default value is None.
        :type upn: bool
        :param begin_from: Optional. Relative path within the specified
         directory from which the listing starts; multiple entity levels are
         supported only for recursive listings. Default value is None.
        :type begin_from: str
        :return: PathList or the result of cls(response)
        :rtype: ~azure.storage.filedatalake.models.PathList
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.PathList] = kwargs.pop("cls", None)

        _request = build_list_paths_request(
            url=self._config.url,
            recursive=recursive,
            request_id_parameter=request_id_parameter,
            timeout=timeout,
            continuation=continuation,
            path=path,
            max_results=max_results,
            upn=upn,
            begin_from=begin_from,
            resource=self._config.resource,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))

        deserialized = self._deserialize("PathList", pipeline_response.http_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore

    @distributed_trace
    def list_blob_hierarchy_segment(
        self,
        prefix: Optional[str] = None,
        delimiter: Optional[str] = None,
        marker: Optional[str] = None,
        max_results: Optional[int] = None,
        include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None,
        showonly: Literal["deleted"] = "deleted",
        timeout: Optional[int] = None,
        request_id_parameter: Optional[str] = None,
        **kwargs: Any
    ) -> _models.ListBlobsHierarchySegmentResponse:
        """Return a list of the blobs under the specified container (List Blobs operation).

        :param prefix: Filters results to blobs whose names begin with the
         specified prefix. Default value is None.
        :type prefix: str
        :param delimiter: When present, the response includes a BlobPrefix
         element acting as a placeholder for all blobs whose names share the
         same substring up to the delimiter. Default value is None.
        :type delimiter: str
        :param marker: Opaque continuation value from a previous listing's
         NextMarker, used to request the next page. Default value is None.
        :type marker: str
        :param max_results: Optional maximum number of items to return; values
         omitted or greater than 5,000 return up to 5,000 items. Default value is None.
        :type max_results: int
        :param include: One or more datasets to include in the response.
         Default value is None.
        :type include: list[str or ~azure.storage.filedatalake.models.ListBlobsIncludeItem]
        :param showonly: Dataset filter. Known values are "deleted" and None.
         Default value is "deleted".
        :type showonly: str
        :param timeout: Operation timeout expressed in seconds. Default value is None.
        :type timeout: int
        :param request_id_parameter: Client-generated, opaque value (1 KB limit)
         recorded in the analytics logs when logging is enabled. Default value is None.
        :type request_id_parameter: str
        :return: ListBlobsHierarchySegmentResponse or the result of cls(response)
        :rtype: ~azure.storage.filedatalake.models.ListBlobsHierarchySegmentResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container"))
        comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
        cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None)

        _request = build_list_blob_hierarchy_segment_request(
            url=self._config.url,
            prefix=prefix,
            delimiter=delimiter,
            marker=marker,
            max_results=max_results,
            include=include,
            showonly=showonly,
            timeout=timeout,
            request_id_parameter=request_id_parameter,
            restype=restype,
            comp=comp,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        _request.url = self._client.format_url(_request.url)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(
                _models.StorageError,
                pipeline_response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))

        deserialized = self._deserialize("ListBlobsHierarchySegmentResponse", pipeline_response.http_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore


# === regenerated file: azure/storage/filedatalake/_generated/operations/_operations.py ===
# pylint: disable=line-too-long,useless-suppression,too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from collections.abc import MutableMapping
import datetime
from typing import Any, Callable, Iterator, Optional, TypeVar, Union

from azure.core import MatchConditions, PipelineClient
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceModifiedError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    StreamClosedError,
    StreamConsumedError,
    map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.rest import HttpRequest, HttpResponse
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict

from .. import models as _models
from .._configuration import DataLakeClientConfiguration
from .._utils.model_base import _deserialize, _deserialize_xml, _failsafe_deserialize
from .._utils.serialization import Deserializer, Serializer
from .._utils.utils import prep_if_match, prep_if_none_match

T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]]

# Module-level serializer shared by all request builders; client-side
# validation is disabled because the service validates the request.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False


def build_service_list_file_systems_request(
    *,
    resource: Union[str, _models.AccountResourceType],
    prefix: Optional[str] = None,
    continuation: Optional[str] = None,
    max_results: Optional[int] = None,
    timeout: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request that lists the account's filesystems."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL (relative to the client's endpoint).
    _url = ""

    # Construct parameters
    _params["resource"] = _SERIALIZER.query("resource", resource, "str")
    if prefix is not None:
        _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str")
    if continuation is not None:
        _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str")
    if max_results is not None:
        _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")

    # Construct headers
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_file_system_create_request(
    *,
    resource: Union[str, _models.FileSystemResourceType],
    properties: Optional[str] = None,
    timeout: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a PUT request that creates a filesystem."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06"))
    # Construct URL (relative to the client's endpoint).
    _url = ""

    # Construct parameters
    _params["resource"] = _SERIALIZER.query("resource", resource, "str")
    if timeout is not None:
        _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int")

    # Construct headers
    _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
    if properties is not None:
        _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_file_system_set_properties_request(
    *,
    resource: Union[str, _models.FileSystemResourceType],
    properties: Optional[str] = None,
    if_modified_since: Optional[datetime.datetime] = None,
    if_unmodified_since: Optional[datetime.datetime] = None,
    timeout: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a PATCH request that sets filesystem properties (definition continues below)."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+ + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + _params["resource"] = _SERIALIZER.query("resource", resource, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if properties is not None: + _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_system_get_properties_request( + *, resource: Union[str, _models.FileSystemResourceType], timeout: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + _params["resource"] = _SERIALIZER.query("resource", resource, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_system_delete_request( + *, + resource: Union[str, _models.FileSystemResourceType], + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + _params["resource"] = _SERIALIZER.query("resource", resource, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_system_list_paths_request( + *, + recursive: bool, + continuation: Optional[str] = None, + path: Optional[str] = None, + max_results: Optional[int] = None, + upn: Optional[bool] = None, + begin_from: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "?resource=filesystem" + + # Construct parameters + if continuation is not None: + _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") + if path is not None: + _params["directory"] = _SERIALIZER.query("path", path, "str") + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + if max_results is not None: + _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int") + if upn is not None: + 
_params["upn"] = _SERIALIZER.query("upn", upn, "bool") + if begin_from is not None: + _params["beginFrom"] = _SERIALIZER.query("begin_from", begin_from, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_system_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long + *, + prefix: Optional[str] = None, + delimiter: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None, + showonly: Optional[Union[str, _models.ListBlobsShowOnly]] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + accept = _headers.pop("Accept", "application/xml") + + # Construct URL + _url = "?restype=container&comp=list" + + # Construct parameters + if prefix is not None: + _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") + if delimiter is not None: + _params["delimiter"] = _SERIALIZER.query("delimiter", delimiter, "str") + if marker is not None: + _params["marker"] = _SERIALIZER.query("marker", marker, "str") + if max_results is not None: + _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int") + if include is not None: + _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",") + if showonly is not None: + _params["showonly"] = _SERIALIZER.query("showonly", showonly, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, 
"int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_create_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches + *, + resource: Optional[Union[str, _models.PathResourceType]] = None, + mode: Optional[Union[str, _models.PathRenameMode]] = None, + continuation: Optional[str] = None, + cache_control: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_disposition: Optional[str] = None, + content_type: Optional[str] = None, + rename_source: Optional[str] = None, + lease_id: Optional[str] = None, + source_lease_id: Optional[str] = None, + properties: Optional[str] = None, + permissions: Optional[str] = None, + umask: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + acl: Optional[str] = None, + proposed_lease_id: Optional[str] = None, + lease_duration: Optional[int] = None, + expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None, + expires_on: Optional[str] = None, + encryption_context: Optional[str] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + if resource is not None: + _params["resource"] = _SERIALIZER.query("resource", resource, "str") + if mode is not None: + _params["mode"] = _SERIALIZER.query("mode", mode, "str") + if continuation is not None: + _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if cache_control is not None: + _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") + if content_encoding is not None: + _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") + if content_language is not None: + _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") + if content_disposition is not None: + _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, "str") + if content_type is not None: + _headers["x-ms-content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if rename_source is not None: + _headers["x-ms-rename-source"] = _SERIALIZER.header("rename_source", rename_source, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if source_lease_id is not None: + _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") + if properties is not None: + _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") + if permissions is not None: + _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", 
permissions, "str") + if umask is not None: + _headers["x-ms-umask"] = _SERIALIZER.header("umask", umask, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if source_if_match is not None: + _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") + if source_if_none_match is not None: + _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") + if source_if_modified_since is not None: + _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( + "source_if_modified_since", source_if_modified_since, "rfc-1123" + ) + if source_if_unmodified_since is not None: + _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( + "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" + ) + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if owner is not None: + _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") + if group is not None: + _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") + if acl is not None: + _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if lease_duration is not None: + _headers["x-ms-lease-duration"] = 
_SERIALIZER.header("lease_duration", lease_duration, "int") + if expiry_options is not None: + _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") + if expires_on is not None: + _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") + if encryption_context is not None: + _headers["x-ms-encryption-context"] = _SERIALIZER.header("encryption_context", encryption_context, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_update_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches + *, + action: Union[str, _models.PathUpdateAction], + max_records: Optional[int] = None, + continuation: Optional[str] = None, + mode: Optional[Union[str, _models.PathSetAccessControlRecursiveMode]] = None, + force_flag: Optional[bool] = None, + position: Optional[int] = None, + retain_uncommitted_data: Optional[bool] = None, + close: Optional[bool] = None, + content_length: Optional[int] = None, + content_md5: Optional[bytes] = None, + lease_id: Optional[str] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + properties: Optional[str] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + permissions: Optional[str] = None, + acl: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + 
timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "" + + # Construct parameters + _params["action"] = _SERIALIZER.query("action", action, "str") + if max_records is not None: + _params["maxRecords"] = _SERIALIZER.query("max_records", max_records, "int") + if continuation is not None: + _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") + if mode is not None: + _params["mode"] = _SERIALIZER.query("mode", mode, "str") + if force_flag is not None: + _params["forceFlag"] = _SERIALIZER.query("force_flag", force_flag, "bool") + if position is not None: + _params["position"] = _SERIALIZER.query("position", position, "int") + if retain_uncommitted_data is not None: + _params["retainUncommittedData"] = _SERIALIZER.query("retain_uncommitted_data", retain_uncommitted_data, "bool") + if close is not None: + _params["close"] = _SERIALIZER.query("close", close, "bool") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_length is not None: + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if content_md5 is not None: + _headers["x-ms-content-md5"] = _SERIALIZER.header("content_md5", content_md5, "bytearray") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if cache_control is 
not None: + _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") + if content_disposition is not None: + _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, "str") + if content_encoding is not None: + _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") + if content_language is not None: + _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") + if properties is not None: + _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") + if owner is not None: + _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") + if group is not None: + _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") + if permissions is not None: + _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", permissions, "str") + if acl is not None: + _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = 
_SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_lease_request( + *, + lease_action: Union[str, _models.PathLeaseAction], + lease_duration: Optional[int] = None, + lease_break_period: Optional[int] = None, + lease_id: Optional[str] = None, + proposed_lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-lease-action"] = _SERIALIZER.header("lease_action", lease_action, "str") + if lease_duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") + if lease_break_period is not None: + _headers["x-ms-lease-break-period"] = _SERIALIZER.header("lease_break_period", lease_break_period, "int") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = 
_SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_read_request( + *, + range: Optional[str] = None, + lease_id: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + accept = _headers.pop("Accept", "application/octet-stream") + + # Construct URL + _url = "" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if range is not None: + _headers["Range"] = _SERIALIZER.header("range", range, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if range_get_content_md5 is not None: + _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header( + "range_get_content_md5", range_get_content_md5, "bool" + ) + if 
if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_get_properties_request( + *, + action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, + upn: Optional[bool] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + if action is not None: + _params["action"] = 
_SERIALIZER.query("action", action, "str") + if upn is not None: + _params["upn"] = _SERIALIZER.query("upn", upn, "bool") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_delete_request( + *, + recursive: Optional[bool] = None, + continuation: Optional[str] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + paginated: Optional[bool] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "" + + # Construct parameters + if recursive is not None: + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + if continuation is not None: + 
_params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") + if paginated is not None: + _params["paginated"] = _SERIALIZER.query("paginated", paginated, "bool") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_set_access_control_request( + *, + lease_id: Optional[str] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + permissions: Optional[str] = None, + acl: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "?action=setAccessControl" + + # Construct parameters + if timeout is not 
None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if owner is not None: + _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") + if group is not None: + _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") + if permissions is not None: + _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", permissions, "str") + if acl is not None: + _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_set_access_control_recursive_request( # pylint: disable=name-too-long + *, + mode: Union[str, _models.PathSetAccessControlRecursiveMode], + continuation: Optional[str] = None, + force_flag: Optional[bool] = None, + max_records: Optional[int] = None, + acl: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + accept = _headers.pop("Accept", 
"application/json") + + # Construct URL + _url = "?action=setAccessControlRecursive" + + # Construct parameters + _params["mode"] = _SERIALIZER.query("mode", mode, "str") + if continuation is not None: + _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") + if force_flag is not None: + _params["forceFlag"] = _SERIALIZER.query("force_flag", force_flag, "bool") + if max_records is not None: + _params["maxRecords"] = _SERIALIZER.query("max_records", max_records, "int") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if acl is not None: + _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_flush_data_request( # pylint: disable=too-many-locals + *, + position: Optional[int] = None, + retain_uncommitted_data: Optional[bool] = None, + close: Optional[bool] = None, + content_length: Optional[int] = None, + content_md5: Optional[bytes] = None, + lease_id: Optional[str] = None, + lease_action: Optional[Union[str, _models.LeaseAction]] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + cache_control: Optional[str] = None, + content_type: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = 
None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "?action=flush" + + # Construct parameters + if position is not None: + _params["position"] = _SERIALIZER.query("position", position, "int") + if retain_uncommitted_data is not None: + _params["retainUncommittedData"] = _SERIALIZER.query("retain_uncommitted_data", retain_uncommitted_data, "bool") + if close is not None: + _params["close"] = _SERIALIZER.query("close", close, "bool") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if content_length is not None: + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if content_md5 is not None: + _headers["x-ms-content-md5"] = _SERIALIZER.header("content_md5", content_md5, "bytearray") + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if lease_action is not None: + _headers["x-ms-lease-action"] = _SERIALIZER.header("lease_action", lease_action, "str") + if lease_duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") + if cache_control is not None: + _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") + if content_type is not None: + _headers["x-ms-content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if content_disposition is not None: + _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, 
"str") + if content_encoding is not None: + _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") + if content_language is not None: + _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_append_data_request( + *, + position: Optional[int] = None, + content_length: Optional[int] = None, + transactional_content_hash: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + lease_id: Optional[str] = None, + lease_action: Optional[Union[str, _models.LeaseAction]] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, 
_models.EncryptionAlgorithmType]] = None, + flush: Optional[bool] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "?action=append" + + # Construct parameters + if position is not None: + _params["position"] = _SERIALIZER.query("position", position, "int") + if flush is not None: + _params["flush"] = _SERIALIZER.query("flush", flush, "bool") + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + if content_length is not None: + _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if transactional_content_hash is not None: + _headers["Content-MD5"] = _SERIALIZER.header( + "transactional_content_hash", transactional_content_hash, "bytearray" + ) + if transactional_content_crc64 is not None: + _headers["x-ms-content-crc64"] = _SERIALIZER.header( + "transactional_content_crc64", transactional_content_crc64, "bytearray" + ) + if lease_id is not None: + _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") + if lease_action is not None: + _headers["x-ms-lease-action"] = _SERIALIZER.header("lease_action", lease_action, "str") + if lease_duration is not None: + _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") + if proposed_lease_id is not None: + _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", 
proposed_lease_id, "str") + if encryption_key is not None: + _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") + if encryption_key_sha256 is not None: + _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( + "encryption_key_sha256", encryption_key_sha256, "str" + ) + if encryption_algorithm is not None: + _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_set_expiry_request( + *, + expiry_options: Union[str, _models.PathExpiryOptions], + expires_on: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "?comp=expiry" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") + if expires_on is not None: + _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_path_undelete_request( + *, undelete_source: Optional[str] = None, timeout: Optional[int] = None, 
**kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + version: str = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) + # Construct URL + _url = "?comp=undelete" + + # Construct parameters + if timeout is not None: + _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") + if undelete_source is not None: + _headers["x-ms-undelete-source"] = _SERIALIZER.header("undelete_source", undelete_source, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.filedatalake._generated.DataLakeClient`'s + :attr:`service` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_file_systems( + self, + *, + resource: Union[str, _models.AccountResourceType], + prefix: Optional[str] = None, + continuation: Optional[str] = None, + max_results: Optional[int] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.FileSystemList: + """List filesystems and their properties in given account. + + :keyword resource: The value must be "account" for all account operations. "account" Required. 
+ :paramtype resource: str or ~azure.storage.filedatalake._generated.models.AccountResourceType + :keyword prefix: Filters results to filesystems within the specified prefix. Default value is + None. + :paramtype prefix: str + :keyword continuation: Optional. When deleting a directory, the number of paths that are + deleted with each invocation is limited. If the number of paths to be deleted exceeds this + limit, a continuation token is returned in this response header. When a continuation token is + returned in the response, it must be specified in a subsequent invocation of the delete + operation to continue deleting the directory. Default value is None. + :paramtype continuation: str + :keyword max_results: An optional value that specifies the maximum number of items to return. + If omitted or greater than 5,000, the response will include up to 5,000 items. Default value is + None. + :paramtype max_results: int + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: FileSystemList. 
The FileSystemList is compatible with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.FileSystemList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None) + + _request = build_service_list_file_systems_request( + resource=resource, + prefix=prefix, + continuation=continuation, + max_results=max_results, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileSystemList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class FileSystemOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.filedatalake._generated.DataLakeClient`'s + :attr:`file_system` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + *, + resource: Union[str, _models.FileSystemResourceType], + properties: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Create a FileSystem rooted at the specified location. If the FileSystem already exists, the + operation fails. This operation does not support conditional HTTP requests. + + :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem" + Required. 
+ :paramtype resource: str or + ~azure.storage.filedatalake._generated.models.FileSystemResourceType + :keyword properties: Optional. User-defined properties to be stored with the filesystem, in the + format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value + is a base64 encoded string. Note that the string may only contain ASCII characters in the + ISO-8859-1 character set. If the filesystem exists, any properties not included in the list + will be removed. All properties are removed if the header is omitted. To merge new and existing + properties, first get all existing properties and the current E-Tag, then make a conditional + request with the E-Tag and include values for all properties. Default value is None. + :paramtype properties: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_system_create_request( + resource=resource, + properties=properties, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-namespace-enabled"] = self._deserialize( + "str", response.headers.get("x-ms-namespace-enabled") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_properties( # pylint: disable=inconsistent-return-statements + self, + *, + resource: Union[str, _models.FileSystemResourceType], + properties: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Set properties for the FileSystem. This operation supports conditional HTTP requests. For more + information, see `Specifying Conditional Headers for Blob Service Operations + `_. + + :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem" + Required. + :paramtype resource: str or + ~azure.storage.filedatalake._generated.models.FileSystemResourceType + :keyword properties: Optional. 
User-defined properties to be stored with the filesystem, in the + format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value + is a base64 encoded string. Note that the string may only contain ASCII characters in the + ISO-8859-1 character set. If the filesystem exists, any properties not included in the list + will be removed. All properties are removed if the header is omitted. To merge new and existing + properties, first get all existing properties and the current E-Tag, then make a conditional + request with the E-Tag and include values for all properties. Default value is None. + :paramtype properties: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_system_set_properties_request( + resource=resource, + properties=properties, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( 
+ "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_properties( + self, *, resource: Union[str, _models.FileSystemResourceType], timeout: Optional[int] = None, **kwargs: Any + ) -> bool: + """All system and user-defined filesystem properties are specified in the response headers. + + :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem" + Required. + :paramtype resource: str or + ~azure.storage.filedatalake._generated.models.FileSystemResourceType + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_system_get_properties_request( + resource=resource, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + 
_models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) + response_headers["x-ms-namespace-enabled"] = self._deserialize( + "str", response.headers.get("x-ms-namespace-enabled") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + *, + resource: Union[str, _models.FileSystemResourceType], + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same + identifier cannot be created for at least 30 seconds. While the filesystem is being deleted, + attempts to create a filesystem with the same identifier will fail with status code 409 + (Conflict), with the service returning additional error information indicating that the + filesystem is being deleted. 
All other operations, including operations on any files or + directories within the filesystem, will fail with status code 404 (Not Found) while the + filesystem is being deleted. This operation supports conditional HTTP requests. For more + information, see `Specifying Conditional Headers for Blob Service Operations + `_. + + :keyword resource: The value must be "filesystem" for all filesystem operations. "filesystem" + Required. + :paramtype resource: str or + ~azure.storage.filedatalake._generated.models.FileSystemResourceType + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_file_system_delete_request( + resource=resource, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_paths( + self, + *, + recursive: bool, + 
continuation: Optional[str] = None, + path: Optional[str] = None, + max_results: Optional[int] = None, + upn: Optional[bool] = None, + begin_from: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.PathList: + """List FileSystem paths and their properties. + + :keyword recursive: Required. Required. + :paramtype recursive: bool + :keyword continuation: Optional. When deleting a directory, the number of paths that are + deleted with each invocation is limited. If the number of paths to be deleted exceeds this + limit, a continuation token is returned in this response header. When a continuation token is + returned in the response, it must be specified in a subsequent invocation of the delete + operation to continue deleting the directory. Default value is None. + :paramtype continuation: str + :keyword path: Optional. Filters results to paths within the specified directory. An error + occurs if the directory does not exist. Default value is None. + :paramtype path: str + :keyword max_results: An optional value that specifies the maximum number of items to return. + If omitted or greater than 5,000, the response will include up to 5,000 items. Default value is + None. + :paramtype max_results: int + :keyword upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If + "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response + headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If + "false", the values will be returned as Azure Active Directory Object IDs. The default value is + false. Note that group and application Object IDs are not translated because they do not have + unique friendly names. Default value is None. + :paramtype upn: bool + :keyword begin_from: Optional. A relative path within the specified directory where the listing + will start from. Default value is None. 
+ :paramtype begin_from: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: PathList. The PathList is compatible with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.PathList + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PathList] = kwargs.pop("cls", None) + + _request = build_file_system_list_paths_request( + recursive=recursive, + continuation=continuation, + path=path, + max_results=max_results, + upn=upn, + begin_from=begin_from, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PathList, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_blob_hierarchy_segment( + self, + *, + prefix: Optional[str] = None, + delimiter: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + include: Optional[list[Union[str, _models.ListBlobsIncludeItem]]] = None, + showonly: Optional[Union[str, _models.ListBlobsShowOnly]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.ListBlobsHierarchySegmentResponse: + """The List Blobs operation returns a list of the blobs under the specified container. + + :keyword prefix: Filters results to filesystems within the specified prefix. Default value is + None. 
+ :paramtype prefix: str + :keyword delimiter: When the request includes this parameter, the operation returns a + BlobPrefix element in the response body that acts as a placeholder for all blobs whose names + begin with the same substring up to the appearance of the delimiter character. The delimiter + may be a single character or a string. Default value is None. + :paramtype delimiter: str + :keyword marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :paramtype marker: str + :keyword max_results: An optional value that specifies the maximum number of items to return. + If omitted or greater than 5,000, the response will include up to 5,000 items. Default value is + None. + :paramtype max_results: int + :keyword include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :paramtype include: list[str or + ~azure.storage.filedatalake._generated.models.ListBlobsIncludeItem] + :keyword showonly: Include this parameter to specify one or more datasets to include in the + response. "deleted" Default value is None. + :paramtype showonly: str or ~azure.storage.filedatalake._generated.models.ListBlobsShowOnly + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: ListBlobsHierarchySegmentResponse. 
The ListBlobsHierarchySegmentResponse is compatible + with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.ListBlobsHierarchySegmentResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.ListBlobsHierarchySegmentResponse] = kwargs.pop("cls", None) + + _request = build_file_system_list_blob_hierarchy_segment_request( + prefix=prefix, + delimiter=delimiter, + marker=marker, + max_results=max_results, + include=include, + showonly=showonly, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", 
response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize_xml(_models.ListBlobsHierarchySegmentResponse, response.text()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + +class PathOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.filedatalake._generated.DataLakeClient`'s + :attr:`path` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: DataLakeClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements,too-many-locals + self, + *, + resource: Optional[Union[str, _models.PathResourceType]] = None, + mode: Optional[Union[str, _models.PathRenameMode]] = None, + continuation: Optional[str] = None, + cache_control: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_disposition: Optional[str] = None, + content_type: Optional[str] = None, + rename_source: Optional[str] = None, + lease_id: Optional[str] = None, + source_lease_id: 
Optional[str] = None,
        properties: Optional[str] = None,
        permissions: Optional[str] = None,
        umask: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        source_if_match: Optional[str] = None,
        source_if_none_match: Optional[str] = None,
        source_if_modified_since: Optional[datetime.datetime] = None,
        source_if_unmodified_since: Optional[datetime.datetime] = None,
        encryption_key: Optional[str] = None,
        encryption_key_sha256: Optional[str] = None,
        encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
        owner: Optional[str] = None,
        group: Optional[str] = None,
        acl: Optional[str] = None,
        proposed_lease_id: Optional[str] = None,
        lease_duration: Optional[int] = None,
        expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None,
        expires_on: Optional[str] = None,
        encryption_context: Optional[str] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Create or rename a file or directory. By default, the destination is overwritten and if the
        destination already exists and has a lease the lease is broken. This operation supports
        conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob
        Service Operations
        <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.
        To fail if the destination already exists, use a conditional request with If-None-Match: "*".

        :keyword resource: Required only for Create File and Create Directory. The value must be "file"
         or "directory". Known values are: "directory" and "file". Default value is None.
        :paramtype resource: str or ~azure.storage.filedatalake._generated.models.PathResourceType
        :keyword mode: Optional. Valid only when namespace is enabled. This parameter determines the
         behavior of the rename operation. The value must be "legacy" or "posix", and the default value
         will be "posix". Known values are: "legacy" and "posix". Default value is None.
        :paramtype mode: str or ~azure.storage.filedatalake._generated.models.PathRenameMode
        :keyword continuation: Optional. When deleting a directory, the number of paths that are
         deleted with each invocation is limited. If the number of paths to be deleted exceeds this
         limit, a continuation token is returned in this response header. When a continuation token is
         returned in the response, it must be specified in a subsequent invocation of the delete
         operation to continue deleting the directory. Default value is None.
        :paramtype continuation: str
        :keyword cache_control: Optional. Sets the blob's cache control. If specified, this property is
         stored with the blob and returned with a read request. Default value is None.
        :paramtype cache_control: str
        :keyword content_encoding: Optional. Sets the blob's content encoding. If specified, this
         property is stored with the blob and returned with a read request. Default value is None.
        :paramtype content_encoding: str
        :keyword content_language: Optional. Set the blob's content language. If specified, this
         property is stored with the blob and returned with a read request. Default value is None.
        :paramtype content_language: str
        :keyword content_disposition: Optional. Sets the blob's Content-Disposition header. Default
         value is None.
        :paramtype content_disposition: str
        :keyword content_type: Optional. Sets the blob's content type. If specified, this property is
         stored with the blob and returned with a read request. Default value is None.
        :paramtype content_type: str
        :keyword rename_source: An optional file or directory to be renamed. The value must have the
         following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties
         will overwrite the existing properties; otherwise, the existing properties will be preserved.
         This value must be a URL percent-encoded string. Note that the string may only contain ASCII
         characters in the ISO-8859-1 character set. Default value is None.
        :paramtype rename_source: str
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword source_lease_id: A lease ID for the source path. If specified, the source path must
         have an active lease and the lease ID must match. Default value is None.
        :paramtype source_lease_id: str
        :keyword properties: Optional. User-defined properties to be stored with the filesystem, in the
         format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value
         is a base64 encoded string. Note that the string may only contain ASCII characters in the
         ISO-8859-1 character set. If the filesystem exists, any properties not included in the list
         will be removed. All properties are removed if the header is omitted. To merge new and existing
         properties, first get all existing properties and the current E-Tag, then make a conditional
         request with the E-Tag and include values for all properties. Default value is None.
        :paramtype properties: str
        :keyword permissions: Optional and only valid if Hierarchical Namespace is enabled for the
         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
         Each class may be granted read, write, or execute permission. The sticky bit is also supported.
         Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. Default value
         is None.
        :paramtype permissions: str
        :keyword umask: Optional and only valid if Hierarchical Namespace is enabled for the account.
         When creating a file or directory and the parent folder does not have a default ACL, the umask
         restricts the permissions of the file or directory to be created. The resulting permission is
         given by p bitwise and not u, where p is the permission and u is the umask. For example, if p
         is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777
         for a directory and 0666 for a file. The default umask is 0027. The umask must be specified in
         4-digit octal notation (e.g. 0766). Default value is None.
        :paramtype umask: str
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not
         been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value.
         Default value is None.
        :paramtype source_if_match: str
        :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a
         matching value. Default value is None.
        :paramtype source_if_none_match: str
        :keyword source_if_modified_since: Specify this header value to operate only on a blob if it
         has been modified since the specified date/time. Default value is None.
        :paramtype source_if_modified_since: ~datetime.datetime
        :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it
         has not been modified since the specified date/time. Default value is None.
        :paramtype source_if_unmodified_since: ~datetime.datetime
        :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data
         provided in the request. If not specified, encryption is performed with the root account
         encryption key. Default value is None.
        :paramtype encryption_key: str
        :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be
         provided if the x-ms-encryption-key header is provided. Default value is None.
        :paramtype encryption_key_sha256: str
        :keyword encryption_algorithm: The algorithm used to produce the encryption key hash.
         Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key
         header is provided. "AES256" Default value is None.
        :paramtype encryption_algorithm: str or
         ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType
        :keyword owner: Optional. The owner of the blob or directory. Default value is None.
        :paramtype owner: str
        :keyword group: Optional. The owning group of the blob or directory. Default value is None.
        :paramtype group: str
        :keyword acl: Sets POSIX access control rights on files and directories. The value is a
         comma-separated list of access control entries. Each access control entry (ACE) consists of a
         scope, a type, a user or group identifier, and permissions in the format
         "[scope:][type]:[id]:[permissions]". Default value is None.
        :paramtype acl: str
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None.
        :paramtype proposed_lease_id: str
        :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the
         duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1
         for infinite lease. Default value is None.
        :paramtype lease_duration: int
        :keyword expiry_options: Required. Indicates mode of the expiry time. Known values are:
         "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Default value is None.
        :paramtype expiry_options: str or
         ~azure.storage.filedatalake._generated.models.PathExpiryOptions
        :keyword expires_on: The time to set the blob to expiry. Default value is None.
        :paramtype expires_on: str
        :keyword encryption_context: Specifies the encryption context to set on the file. Default value
         is None.
        :paramtype encryption_context: str
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Default mapping from HTTP status to azure-core exception types; callers may
        # override or extend it via the ``error_map`` keyword.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # HTTP 412 (Precondition Failed) maps to a different exception depending on
        # which etag match condition the caller supplied.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_create_request(
            resource=resource,
            mode=mode,
            continuation=continuation,
            cache_control=cache_control,
            content_encoding=content_encoding,
            content_language=content_language,
            content_disposition=content_disposition,
            content_type=content_type,
            rename_source=rename_source,
            lease_id=lease_id,
            source_lease_id=source_lease_id,
            properties=properties,
            permissions=permissions,
            umask=umask,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            source_if_match=source_if_match,
            source_if_none_match=source_if_none_match,
            source_if_modified_since=source_if_modified_since,
            source_if_unmodified_since=source_if_unmodified_since,
            encryption_key=encryption_key,
            encryption_key_sha256=encryption_key_sha256,
            encryption_algorithm=encryption_algorithm,
            owner=owner,
            group=group,
            acl=acl,
            proposed_lease_id=proposed_lease_id,
            lease_duration=lease_duration,
            expiry_options=expiry_options,
            expires_on=expires_on,
            encryption_context=encryption_context,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The endpoint URL is taken verbatim from configuration (already encoded),
        # hence skip_quote=True.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Create/rename succeeds with 201 Created only.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Best-effort parse of the service error body; never raises on malformed bodies.
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["x-ms-request-server-encrypted"] = self._deserialize(
            "bool", response.headers.get("x-ms-request-server-encrypted")
        )
        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-key-sha256")
        )
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def update(  # pylint: disable=too-many-locals
        self,
        body: bytes,
        *,
        action: Union[str, _models.PathUpdateAction],
        max_records: Optional[int] = None,
        continuation: Optional[str] = None,
        mode: Optional[Union[str, _models.PathSetAccessControlRecursiveMode]] = None,
        force_flag: Optional[bool] = None,
        position: Optional[int] = None,
        retain_uncommitted_data: Optional[bool] = None,
        close: Optional[bool] = None,
        content_length: Optional[int] = None,
        content_md5: Optional[bytes] = None,
        lease_id: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_disposition: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        properties: Optional[str] = None,
        owner: Optional[str] = None,
        group: Optional[str] = None,
        permissions: Optional[str] = None,
        acl: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        structured_body_type: Optional[str] = None,
        structured_content_length: Optional[int] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> _models.SetAccessControlRecursiveResponse:
        """Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file,
        sets properties for a file or directory, or sets access control for a file or directory. Data
        can only be appended to a file. Concurrent writes to the same file using multiple clients are
        not supported. This operation supports conditional HTTP requests. For more information, see
        `Specifying Conditional Headers for Blob Service Operations
        <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

        :param body: Initial data. Required.
+ :type body: bytes + :keyword action: The action must be "append" to upload data to be appended to a file, "flush" + to flush previously uploaded data to a file, "setProperties" to set the properties of a file or + directory, "setAccessControl" to set the owner, group, permissions, or access control list for + a file or directory, or "setAccessControlRecursive" to set the access control list for a + directory recursively. Note that Hierarchical Namespace must be enabled for the account in + order to use access control. Also note that the Access Control List (ACL) includes permissions + for the owner, owning group, and others, so the x-ms-permissions and x-ms-acl request headers + are mutually exclusive. Known values are: "append", "flush", "setProperties", + "setAccessControl", and "setAccessControlRecursive". Required. + :paramtype action: str or ~azure.storage.filedatalake._generated.models.PathUpdateAction + :keyword max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies + the maximum number of files or directories on which the acl change will be applied. If omitted + or greater than 2,000, the request will process up to 2,000 items. Default value is None. + :paramtype max_records: int + :keyword continuation: Optional. The number of paths processed with each invocation is limited. + If the number of paths to be processed exceeds this limit, a continuation token is returned in + the response header x-ms-continuation. When a continuation token is returned in the response, + it must be percent-encoded and specified in a subsequent invocation of + setAccessControlRecursive operation. Default value is None. + :paramtype continuation: str + :keyword mode: Mode for set access control recursive. Known values are: "set", "modify", and + "remove". Default value is None. + :paramtype mode: str or + ~azure.storage.filedatalake._generated.models.PathSetAccessControlRecursiveMode + :keyword force_flag: Optional. 
Valid for "SetAccessControlRecursive" operation. If set to + false, the operation will terminate quickly on encountering user errors (4XX). If true, the + operation will ignore user errors and proceed with the operation on other sub-entities of the + directory. Continuation token will only be returned when forceFlag is true in case of user + errors. If not set the default value is false for this. Default value is None. + :paramtype force_flag: bool + :keyword position: This parameter allows the caller to upload data in parallel and control the + order in which it is appended to the file. It is required when uploading data to be appended to + the file and when flushing previously uploaded data to the file. The value must be the position + where the data is to be appended. Uploaded data is not immediately flushed, or written, to the + file. To flush, the previously uploaded data must be contiguous, the position parameter must be + specified and equal to the length of the file after all data has been written, and there must + not be a request entity body included with the request. Default value is None. + :paramtype position: int + :keyword retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data + is retained after the flush operation completes; otherwise, the uncommitted data is deleted + after the flush operation. The default is false. Data at offsets less than the specified + position are written to the file when flush succeeds, but this optional parameter allows data + after the flush position to be retained for a future flush operation. Default value is None. + :paramtype retain_uncommitted_data: bool + :keyword close: Azure Storage Events allow applications to receive notifications when files + change. When Azure Storage Events are enabled, a file changed event is raised. 
This event has a + property indicating whether this is the final change to distinguish the difference between an + intermediate flush to a file stream and the final close of a file stream. The close query + parameter is valid only when the action is "flush" and change notifications are enabled. If the + value of close is "true" and the flush operation completes successfully, the service raises a + file change notification with a property indicating that this is the final update (the file + stream has been closed). If "false" a change notification is raised indicating the file has + changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to + indicate that the file stream has been closed. Default value is None. + :paramtype close: bool + :keyword content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush + Data". Must be the length of the request content in bytes for "Append Data". Default value is + None. + :paramtype content_length: int + :keyword content_md5: Specify the transactional md5 for the body, to be validated by the + service. Default value is None. + :paramtype content_md5: bytes + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword cache_control: Optional. Sets the blob's cache control. If specified, this property is + stored with the blob and returned with a read request. Default value is None. + :paramtype cache_control: str + :keyword content_disposition: Optional. Sets the blob's Content-Disposition header. Default + value is None. + :paramtype content_disposition: str + :keyword content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype content_encoding: str + :keyword content_language: Optional. Set the blob's content language. 
If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype content_language: str + :keyword properties: Optional. User-defined properties to be stored with the filesystem, in the + format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value + is a base64 encoded string. Note that the string may only contain ASCII characters in the + ISO-8859-1 character set. If the filesystem exists, any properties not included in the list + will be removed. All properties are removed if the header is omitted. To merge new and existing + properties, first get all existing properties and the current E-Tag, then make a conditional + request with the E-Tag and include values for all properties. Default value is None. + :paramtype properties: str + :keyword owner: Optional. The owner of the blob or directory. Default value is None. + :paramtype owner: str + :keyword group: Optional. The owning group of the blob or directory. Default value is None. + :paramtype group: str + :keyword permissions: Optional and only valid if Hierarchical Namespace is enabled for the + account. Sets POSIX access permissions for the file owner, the file owning group, and others. + Each class may be granted read, write, or execute permission. The sticky bit is also supported. + Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. Default value + is None. + :paramtype permissions: str + :keyword acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". Default value is None. + :paramtype acl: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :paramtype structured_body_type: str + :keyword structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :paramtype structured_content_length: int + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: SetAccessControlRecursiveResponse. 
The SetAccessControlRecursiveResponse is compatible + with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.SetAccessControlRecursiveResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None) + + _content = body + + _request = build_path_update_request( + action=action, + max_records=max_records, + continuation=continuation, + mode=mode, + force_flag=force_flag, + position=position, + retain_uncommitted_data=retain_uncommitted_data, + close=close, + content_length=content_length, + content_md5=content_md5, + lease_id=lease_id, + cache_control=cache_control, + content_disposition=content_disposition, + content_encoding=content_encoding, + content_language=content_language, + properties=properties, + owner=owner, + group=group, + permissions=permissions, + acl=acl, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + timeout=timeout, + etag=etag, + match_condition=match_condition, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-properties"] = 
    @distributed_trace
    def lease(  # pylint: disable=inconsistent-return-statements
        self,
        *,
        lease_action: Union[str, _models.PathLeaseAction],
        lease_duration: Optional[int] = None,
        lease_break_period: Optional[int] = None,
        lease_id: Optional[str] = None,
        proposed_lease_id: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Create and manage a lease to restrict write and delete access to the path. This operation
        supports conditional HTTP requests. For more information, see Specifying Conditional Headers
        for Blob Service Operations.

        :keyword lease_action: There are five lease actions: "acquire", "break", "change", "renew",
         and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and
         "x-ms-lease-duration" to acquire a new lease. Use "break" to break an existing lease. When a
         lease is broken, the lease break period is allowed to elapse, during which time no lease
         operation except break and release can be performed on the file. When a lease is successfully
         broken, the response indicates the interval in seconds until a new lease can be acquired. Use
         "change" and specify the current lease ID in "x-ms-lease-id" and the new lease ID in
         "x-ms-proposed-lease-id" to change the lease ID of an active lease. Use "renew" and specify
         the "x-ms-lease-id" to renew an existing lease. Use "release" and specify the "x-ms-lease-id"
         to release a lease. Known values are: "acquire", "break", "change", "renew", and "release".
         Required.
        :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.PathLeaseAction
        :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the
         duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1
         for infinite lease. Default value is None.
        :paramtype lease_duration: int
        :keyword lease_break_period: The lease break period duration is optional to break a lease, and
         specifies the break period of the lease in seconds. The lease break duration must be between 0
         and 60 seconds. Default value is None.
        :paramtype lease_break_period: int
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None.
        :paramtype proposed_lease_id: str
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not
         been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP error statuses to azure-core exception types. Which exception a 412
        # (Precondition Failed) maps to depends on the etag condition the caller requested.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        # Callers may extend/override the mapping via the error_map kwarg.
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional response-transform callback; when set, its result is returned instead of None.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_lease_request(
            lease_action=lease_action,
            lease_duration=lease_duration,
            lease_break_period=lease_break_period,
            lease_id=lease_id,
            proposed_lease_id=proposed_lease_id,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The configured endpoint is already a full URL; skip_quote prevents re-encoding it.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Lease responses carry no body; run the pipeline without streaming.
        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            # map_error raises the mapped azure-core exception for known statuses;
            # anything unmapped falls through to the generic HttpResponseError below.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        # Surface the service response headers to the optional cls callback.
        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
        response_headers["x-ms-lease-time"] = self._deserialize("str", response.headers.get("x-ms-lease-time"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def read(
        self,
        *,
        range: Optional[str] = None,
        lease_id: Optional[str] = None,
        range_get_content_md5: Optional[bool] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        encryption_key: Optional[str] = None,
        encryption_key_sha256: Optional[str] = None,
        encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> Iterator[bytes]:
        """Read the contents of a file. For read operations, range requests are supported. This
        operation supports conditional HTTP requests. For more information, see Specifying
        Conditional Headers for Blob Service Operations.

        :keyword range: The HTTP Range request header specifies one or more byte ranges of the
         resource to be retrieved. Default value is None.
        :paramtype range: str
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword range_get_content_md5: Optional. When this header is set to "true" and specified
         together with the Range header, the service returns the MD5 hash for the range, as long as
         the range is less than or equal to 4MB in size. Default value is None.
        :paramtype range_get_content_md5: bool
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has
         not been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data
         provided in the request. If not specified, encryption is performed with the root account
         encryption key. Default value is None.
        :paramtype encryption_key: str
        :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be
         provided if the x-ms-encryption-key header is provided. Default value is None.
        :paramtype encryption_key_sha256: str
        :keyword encryption_algorithm: The algorithm used to produce the encryption key hash.
         Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key
         header is provided. "AES256" Default value is None.
        :paramtype encryption_algorithm: str or
         ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: Iterator[bytes]
        :rtype: Iterator[bytes]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP error statuses to azure-core exception types; 412 mapping follows
        # the caller's requested etag precondition.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        _request = build_path_read_request(
            range=range,
            lease_id=lease_id,
            range_get_content_md5=range_get_content_md5,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            encryption_key=encryption_key,
            encryption_key_sha256=encryption_key_sha256,
            encryption_algorithm=encryption_algorithm,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The configured endpoint is already a full URL; skip_quote prevents re-encoding it.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # File content is streamed by default; callers may opt out via stream=False or
        # disable transport-level decompression via decompress=False.
        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", True)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        # Surface the service response headers to the optional cls callback.
        response_headers = {}
        response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges"))
        response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
        response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
        response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
        response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range"))
        response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type"))
response.headers.get("x-ms-resource-type")) + response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) + response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) + response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) + response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_properties( + self, + *, + action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, + upn: Optional[bool] = None, + lease_id: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Get Properties returns all system and user defined 
properties for a path. Get Status returns + all system defined properties for a path. Get Access Control List returns the access control + list for a path. This operation supports conditional HTTP requests. For more information, see + `Specifying Conditional Headers for Blob Service Operations + `_. + + :keyword action: Optional. If the value is "getStatus" only the system defined properties for + the path are returned. If the value is "getAccessControl" the access control list is returned + in the response headers (Hierarchical Namespace must be enabled for the account), otherwise the + properties are returned. Known values are: "getAccessControl" and "getStatus". Default value is + None. + :paramtype action: str or ~azure.storage.filedatalake._generated.models.PathGetPropertiesAction + :keyword upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If + "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response + headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If + "false", the values will be returned as Azure Active Directory Object IDs. The default value is + false. Note that group and application Object IDs are not translated because they do not have + unique friendly names. Default value is None. + :paramtype upn: bool + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_get_properties_request( + action=action, + upn=upn, + lease_id=lease_id, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) + response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) + response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) + response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) + response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) + response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) + response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner")) + response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group")) + response_headers["x-ms-permissions"] = self._deserialize("str", response.headers.get("x-ms-permissions")) + response_headers["x-ms-acl"] = self._deserialize("str", 
        response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
        response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
        response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
        response_headers["x-ms-server-encrypted"] = self._deserialize(
            "bool", response.headers.get("x-ms-server-encrypted")
        )
        response_headers["x-ms-encryption-key-sha256"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-key-sha256")
        )
        response_headers["x-ms-encryption-context"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-context")
        )
        response_headers["x-ms-encryption-scope"] = self._deserialize(
            "str", response.headers.get("x-ms-encryption-scope")
        )
        response_headers["x-ms-creation-time"] = self._deserialize(
            "rfc-1123", response.headers.get("x-ms-creation-time")
        )
        response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
        # Only reachable after the 200 check above, so this reports success as a bool.
        return 200 <= response.status_code <= 299

    @distributed_trace
    def delete(  # pylint: disable=inconsistent-return-statements
        self,
        *,
        recursive: Optional[bool] = None,
        continuation: Optional[str] = None,
        lease_id: Optional[str] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        paginated: Optional[bool] = None,
        timeout: Optional[int] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Delete the file or directory. This operation supports conditional HTTP requests. For more
        information, see Specifying Conditional Headers for Blob Service Operations.

        :keyword recursive: Required. Default value is None.
        :paramtype recursive: bool
        :keyword continuation: Optional. When deleting a directory, the number of paths that are
         deleted with each invocation is limited. If the number of paths to be deleted exceeds this
         limit, a continuation token is returned in this response header. When a continuation token is
         returned in the response, it must be specified in a subsequent invocation of the delete
         operation to continue deleting the directory. Default value is None.
        :paramtype continuation: str
        :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
         and matches this ID. Default value is None.
        :paramtype lease_id: str
        :keyword if_modified_since: Specify this header value to operate only on a blob if it has been
         modified since the specified date/time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has
         not been modified since the specified date/time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword paginated: If true, paginated behavior will be seen. Pagination is for the recursive
         ACL checks as a POSIX requirement in the server and Delete in an atomic operation once the
         ACL checks are completed. If false or missing, normal default behavior will kick in, which
         may timeout in case of very large directories due to recursive ACL checks. This new parameter
         is introduced for backward compatibility. Default value is None.
        :paramtype paginated: bool
        :keyword timeout: The timeout parameter is expressed in seconds. For more information, see
         Setting Timeouts for Blob Service Operations. Default value is None.
        :paramtype timeout: int
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP error statuses to azure-core exception types; 412 mapping follows
        # the caller's requested etag precondition.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_path_delete_request(
            recursive=recursive,
            continuation=continuation,
            lease_id=lease_id,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            paginated=paginated,
            timeout=timeout,
            etag=etag,
            match_condition=match_condition,
            version=self._config.version,
            headers=_headers,
            params=_params,
        )
        # The configured endpoint is already a full URL; skip_quote prevents re-encoding it.
        path_format_arguments = {
            "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.StorageError,
                response,
            )
            raise HttpResponseError(response=response, model=error)

        # Surface the service response headers (including the continuation token for
        # multi-call directory deletes) to the optional cls callback.
        response_headers = {}
        response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation"))
        response_headers["x-ms-deletion-id"] = self._deserialize("str", response.headers.get("x-ms-deletion-id"))
        response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
        response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
        response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
        response_headers["x-ms-client-request-id"] = self._deserialize(
            "str", response.headers.get("x-ms-client-request-id")
        )

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
 :paramtype group: str + :keyword permissions: Optional and only valid if Hierarchical Namespace is enabled for the + account. Sets POSIX access permissions for the file owner, the file owning group, and others. + Each class may be granted read, write, or execute permission. The sticky bit is also supported. + Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. Default value + is None. + :paramtype permissions: str + :keyword acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". Default value is None. + :paramtype acl: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations. Default value is None. + :paramtype timeout: int + :keyword etag: Check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_set_access_control_request( + lease_id=lease_id, + owner=owner, + group=group, + permissions=permissions, + acl=acl, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_access_control_recursive( + self, + *, + mode: Union[str, _models.PathSetAccessControlRecursiveMode], + continuation: Optional[str] = None, + force_flag: Optional[bool] = None, + max_records: Optional[int] = None, + acl: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> _models.SetAccessControlRecursiveResponse: + """Set the access control list for a path and sub-paths. + + :keyword mode: Mode "set" sets POSIX access control rights on files and directories, "modify" + modifies one or more POSIX access control rights that pre-exist on files and directories, + "remove" removes one or more POSIX access control rights that were present earlier on files and + directories. Known values are: "set", "modify", and "remove". Required. + :paramtype mode: str or + ~azure.storage.filedatalake._generated.models.PathSetAccessControlRecursiveMode + :keyword continuation: Optional. When deleting a directory, the number of paths that are + deleted with each invocation is limited. If the number of paths to be deleted exceeds this + limit, a continuation token is returned in this response header. When a continuation token is + returned in the response, it must be specified in a subsequent invocation of the delete + operation to continue deleting the directory. Default value is None. + :paramtype continuation: str + :keyword force_flag: Optional. 
 Valid for "SetAccessControlRecursive" operation. If set to + false, the operation will terminate quickly on encountering user errors (4XX). If true, the + operation will ignore user errors and proceed with the operation on other sub-entities of the + directory. Continuation token will only be returned when forceFlag is true in case of user + errors. If not set, the default value is false. Default value is None. + :paramtype force_flag: bool + :keyword max_records: Optional. It specifies the maximum number of files or directories on + which the acl change will be applied. If omitted or greater than 2,000, the request will + process up to 2,000 items. Default value is None. + :paramtype max_records: int + :keyword acl: Sets POSIX access control rights on files and directories. The value is a + comma-separated list of access control entries. Each access control entry (ACE) consists of a + scope, a type, a user or group identifier, and permissions in the format + "[scope:][type]:[id]:[permissions]". Default value is None. + :paramtype acl: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations. Default value is None. + :paramtype timeout: int + :return: SetAccessControlRecursiveResponse. 
The SetAccessControlRecursiveResponse is compatible + with MutableMapping + :rtype: ~azure.storage.filedatalake._generated.models.SetAccessControlRecursiveResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None) + + _request = build_path_set_access_control_recursive_request( + mode=mode, + continuation=continuation, + force_flag=force_flag, + max_records=max_records, + acl=acl, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + 
response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.SetAccessControlRecursiveResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def flush_data( # pylint: disable=inconsistent-return-statements,too-many-locals + self, + *, + position: Optional[int] = None, + retain_uncommitted_data: Optional[bool] = None, + close: Optional[bool] = None, + content_length: Optional[int] = None, + content_md5: Optional[bytes] = None, + lease_id: Optional[str] = None, + lease_action: Optional[Union[str, _models.LeaseAction]] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + cache_control: Optional[str] = None, + content_type: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + timeout: Optional[int] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Set the owner, group, permissions, or access control list for a path. 
+ + :keyword position: This parameter allows the caller to upload data in parallel and control the + order in which it is appended to the file. It is required when uploading data to be appended to + the file and when flushing previously uploaded data to the file. The value must be the position + where the data is to be appended. Uploaded data is not immediately flushed, or written, to the + file. To flush, the previously uploaded data must be contiguous, the position parameter must be + specified and equal to the length of the file after all data has been written, and there must + not be a request entity body included with the request. Default value is None. + :paramtype position: int + :keyword retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data + is retained after the flush operation completes; otherwise, the uncommitted data is deleted + after the flush operation. The default is false. Data at offsets less than the specified + position are written to the file when flush succeeds, but this optional parameter allows data + after the flush position to be retained for a future flush operation. Default value is None. + :paramtype retain_uncommitted_data: bool + :keyword close: Azure Storage Events allow applications to receive notifications when files + change. When Azure Storage Events are enabled, a file changed event is raised. This event has a + property indicating whether this is the final change to distinguish the difference between an + intermediate flush to a file stream and the final close of a file stream. The close query + parameter is valid only when the action is "flush" and change notifications are enabled. If the + value of close is "true" and the flush operation completes successfully, the service raises a + file change notification with a property indicating that this is the final update (the file + stream has been closed). If "false" a change notification is raised indicating the file has + changed. 
The default is false. This query parameter is set to true by the Hadoop ABFS driver to + indicate that the file stream has been closed. Default value is None. + :paramtype close: bool + :keyword content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush + Data". Must be the length of the request content in bytes for "Append Data". Default value is + None. + :paramtype content_length: int + :keyword content_md5: Specify the transactional md5 for the body, to be validated by the + service. Default value is None. + :paramtype content_md5: bytes + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it + will renew the lease. If "release" it will release the lease only on flush. If + "acquire-release" it will acquire & complete the operation & release the lease once operation + is done. Known values are: "acquire", "auto-renew", "release", and "acquire-release". Default + value is None. + :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.LeaseAction + :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the + duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 + for infinite lease. Default value is None. + :paramtype lease_duration: int + :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None. + :paramtype proposed_lease_id: str + :keyword cache_control: Optional. Sets the blob's cache control. If specified, this property is + stored with the blob and returned with a read request. Default value is None. + :paramtype cache_control: str + :keyword content_type: Optional. Sets the blob's content type. If specified, this property is + stored with the blob and returned with a read request. 
Default value is None. + :paramtype content_type: str + :keyword content_disposition: Optional. Sets the blob's Content-Disposition header. Default + value is None. + :paramtype content_disposition: str + :keyword content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype content_encoding: str + :keyword content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. Default value is None. + :paramtype content_language: str + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. + Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. "AES256" Default value is None. + :paramtype encryption_algorithm: str or + ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType + :keyword timeout: The timeout parameter is expressed in seconds. 
For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_flush_data_request( + position=position, + retain_uncommitted_data=retain_uncommitted_data, + close=close, + content_length=content_length, + content_md5=content_md5, + lease_id=lease_id, + lease_action=lease_action, + lease_duration=lease_duration, + proposed_lease_id=proposed_lease_id, + cache_control=cache_control, + content_type=content_type, + content_disposition=content_disposition, + content_encoding=content_encoding, + content_language=content_language, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + timeout=timeout, + etag=etag, + match_condition=match_condition, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + 
"url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def append_data( # pylint: disable=inconsistent-return-statements,too-many-locals + self, + body: bytes, + *, + 
position: Optional[int] = None, + content_length: Optional[int] = None, + transactional_content_hash: Optional[bytes] = None, + transactional_content_crc64: Optional[bytes] = None, + lease_id: Optional[str] = None, + lease_action: Optional[Union[str, _models.LeaseAction]] = None, + lease_duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, _models.EncryptionAlgorithmType]] = None, + flush: Optional[bool] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Append data to the file. + + :param body: Initial data. Required. + :type body: bytes + :keyword position: This parameter allows the caller to upload data in parallel and control the + order in which it is appended to the file. It is required when uploading data to be appended to + the file and when flushing previously uploaded data to the file. The value must be the position + where the data is to be appended. Uploaded data is not immediately flushed, or written, to the + file. To flush, the previously uploaded data must be contiguous, the position parameter must be + specified and equal to the length of the file after all data has been written, and there must + not be a request entity body included with the request. Default value is None. + :paramtype position: int + :keyword content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush + Data". Must be the length of the request content in bytes for "Append Data". Default value is + None. + :paramtype content_length: int + :keyword transactional_content_hash: Specify the transactional md5 for the body, to be + validated by the service. Default value is None. 
+ :paramtype transactional_content_hash: bytes + :keyword transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :paramtype transactional_content_crc64: bytes + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. Default value is None. + :paramtype lease_id: str + :keyword lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it + will renew the lease. If "release" it will release the lease only on flush. If + "acquire-release" it will acquire & complete the operation & release the lease once operation + is done. Known values are: "acquire", "auto-renew", "release", and "acquire-release". Default + value is None. + :paramtype lease_action: str or ~azure.storage.filedatalake._generated.models.LeaseAction + :keyword lease_duration: The lease duration is required to acquire a lease, and specifies the + duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 + for infinite lease. Default value is None. + :paramtype lease_duration: int + :keyword proposed_lease_id: Proposed lease ID, in a GUID string format. Default value is None. + :paramtype proposed_lease_id: str + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. Default value is None. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. Default value is None. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. + Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. "AES256" Default value is None. 
+ :paramtype encryption_algorithm: str or + ~azure.storage.filedatalake._generated.models.EncryptionAlgorithmType + :keyword flush: If file should be flushed after the append. Default value is None. + :paramtype flush: bool + :keyword structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :paramtype structured_body_type: str + :keyword structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :paramtype structured_content_length: int + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = body + + _request = build_path_append_data_request( + position=position, + content_length=content_length, + transactional_content_hash=transactional_content_hash, + transactional_content_crc64=transactional_content_crc64, + lease_id=lease_id, + lease_action=lease_action, + lease_duration=lease_duration, + proposed_lease_id=proposed_lease_id, + encryption_key=encryption_key, + encryption_key_sha256=encryption_key_sha256, + encryption_algorithm=encryption_algorithm, + flush=flush, + 
structured_body_type=structured_body_type, + structured_content_length=structured_content_length, + timeout=timeout, + content_type=content_type, + version=self._config.version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) + response_headers["x-ms-content-crc64"] = self._deserialize( + "bytearray", response.headers.get("x-ms-content-crc64") + ) + response_headers["x-ms-request-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-request-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = 
self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def set_expiry( # pylint: disable=inconsistent-return-statements + self, + *, + expiry_options: Union[str, _models.PathExpiryOptions], + expires_on: Optional[str] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> None: + """Sets the time a blob will expire and be deleted. + + :keyword expiry_options: Required. Indicates mode of the expiry time. Known values are: + "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. + :paramtype expiry_options: str or + ~azure.storage.filedatalake._generated.models.PathExpiryOptions + :keyword expires_on: The time to set the blob to expiry. Default value is None. + :paramtype expires_on: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. 
+ :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_set_expiry_request( + expiry_options=expiry_options, + expires_on=expires_on, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return 
cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def undelete( # pylint: disable=inconsistent-return-statements + self, *, undelete_source: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any + ) -> None: + """Undelete a path that was previously soft deleted. + + :keyword undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path + of the soft deleted blob to undelete. Default value is None. + :paramtype undelete_source: str + :keyword timeout: The timeout parameter is expressed in seconds. For more information, see Setting + Timeouts for Blob Service Operations.. Default value is None. + :paramtype timeout: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_path_undelete_request( + undelete_source=undelete_source, + timeout=timeout, + version=self._config.version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "url": self._serialize.url("self._config.url", self._config.url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.StorageError, + response, + ) + raise HttpResponseError(response=response, model=error) + + 
response_headers = {} + response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) + response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) + response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) + response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) + response_headers["x-ms-client-request-id"] = self._deserialize( + "str", response.headers.get("x-ms-client-request-id") + ) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_patch.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_patch.py index f7dd32510333..5c098b4e8918 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_patch.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_patch.py @@ -1,14 +1,114 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List +from typing import Any -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level +from azure.core import MatchConditions + +from ._operations import PathOperations as _PathOperations + + +def extract_parameter_groups(kwargs: dict) -> None: + """Extract parameter group objects into flat kwargs for TypeSpec-generated operations. + + The convenience layer passes parameter group model objects (e.g. PathHTTPHeaders, + ModifiedAccessConditions) but the TypeSpec-generated operations expect flat parameters. + This function extracts the fields from any parameter group objects found in kwargs. + """ + path_http_headers = kwargs.pop("path_http_headers", None) + if path_http_headers is not None: + kwargs.setdefault("cache_control", getattr(path_http_headers, "cache_control", None)) + kwargs.setdefault("content_encoding", getattr(path_http_headers, "content_encoding", None)) + kwargs.setdefault("content_language", getattr(path_http_headers, "content_language", None)) + kwargs.setdefault("content_disposition", getattr(path_http_headers, "content_disposition", None)) + kwargs.setdefault("content_type", getattr(path_http_headers, "content_type", None)) + kwargs.setdefault("content_md5", getattr(path_http_headers, "content_md5", None)) + kwargs.setdefault("transactional_content_hash", getattr(path_http_headers, "transactional_content_hash", None)) + + modified_access_conditions = kwargs.pop("modified_access_conditions", None) + if modified_access_conditions is not None: + kwargs.setdefault("if_modified_since", getattr(modified_access_conditions, "if_modified_since", None)) + kwargs.setdefault("if_unmodified_since", getattr(modified_access_conditions, "if_unmodified_since", None)) + if_match = getattr(modified_access_conditions, "if_match", None) + if_none_match = getattr(modified_access_conditions, "if_none_match", 
None) + if if_match: + kwargs.setdefault("etag", if_match) + kwargs.setdefault("match_condition", MatchConditions.IfNotModified) + elif if_none_match == "*": + kwargs.setdefault("match_condition", MatchConditions.IfMissing) + elif if_none_match: + kwargs.setdefault("etag", if_none_match) + kwargs.setdefault("match_condition", MatchConditions.IfModified) + + lease_access_conditions = kwargs.pop("lease_access_conditions", None) + if lease_access_conditions is not None: + kwargs.setdefault("lease_id", getattr(lease_access_conditions, "lease_id", None)) + + cpk_info = kwargs.pop("cpk_info", None) + if cpk_info is not None: + kwargs.setdefault("encryption_key", getattr(cpk_info, "encryption_key", None)) + kwargs.setdefault("encryption_key_sha256", getattr(cpk_info, "encryption_key_sha256", None)) + kwargs.setdefault("encryption_algorithm", getattr(cpk_info, "encryption_algorithm", None)) + + source_modified_access_conditions = kwargs.pop("source_modified_access_conditions", None) + if source_modified_access_conditions is not None: + kwargs.setdefault("source_if_match", getattr(source_modified_access_conditions, "source_if_match", None)) + kwargs.setdefault("source_if_none_match", getattr(source_modified_access_conditions, "source_if_none_match", None)) + kwargs.setdefault("source_if_modified_since", getattr(source_modified_access_conditions, "source_if_modified_since", None)) + kwargs.setdefault("source_if_unmodified_since", getattr(source_modified_access_conditions, "source_if_unmodified_since", None)) + + +class _ParameterGroupExtractionMixin: + """Mixin that extracts parameter group objects into flat kwargs before calling generated operations.""" + + def create(self, **kwargs: Any) -> None: + extract_parameter_groups(kwargs) + return super().create(**kwargs) # type: ignore[misc] + + def update(self, **kwargs: Any): + extract_parameter_groups(kwargs) + return super().update(**kwargs) # type: ignore[misc] + + def delete(self, **kwargs: Any) -> None: + 
extract_parameter_groups(kwargs) + return super().delete(**kwargs) # type: ignore[misc] + + def set_access_control(self, **kwargs: Any) -> None: + extract_parameter_groups(kwargs) + return super().set_access_control(**kwargs) # type: ignore[misc] + + def get_properties(self, **kwargs: Any): + extract_parameter_groups(kwargs) + return super().get_properties(**kwargs) # type: ignore[misc] + + def flush_data(self, **kwargs: Any) -> None: + extract_parameter_groups(kwargs) + return super().flush_data(**kwargs) # type: ignore[misc] + + def append_data(self, **kwargs: Any) -> None: + extract_parameter_groups(kwargs) + return super().append_data(**kwargs) # type: ignore[misc] + + def set_access_control_recursive(self, **kwargs: Any): + extract_parameter_groups(kwargs) + return super().set_access_control_recursive(**kwargs) # type: ignore[misc] + + def undelete(self, **kwargs: Any): + extract_parameter_groups(kwargs) + return super().undelete(**kwargs) # type: ignore[misc] + + +class PathOperations(_ParameterGroupExtractionMixin, _PathOperations): + """PathOperations with parameter group extraction support.""" + + +__all__: list[str] = ["PathOperations"] def patch_sdk(): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py deleted file mode 100644 index ccff697bed0e..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ /dev/null @@ -1,2881 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -import datetime -from typing import Any, Callable, IO, Iterator, Literal, Optional, TypeVar, Union - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - StreamClosedError, - StreamConsumedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. import models as _models -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_create_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - resource: Optional[Union[str, _models.PathResourceType]] = None, - continuation: Optional[str] = None, - mode: Optional[Union[str, _models.PathRenameMode]] = None, - cache_control: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - content_disposition: Optional[str] = None, - content_type_parameter: Optional[str] = None, - rename_source: Optional[str] = None, - lease_id: Optional[str] = None, - source_lease_id: Optional[str] = None, - properties: Optional[str] = None, - permissions: Optional[str] = None, - umask: Optional[str] = None, - if_match: Optional[str] = None, - 
if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - source_if_match: Optional[str] = None, - source_if_none_match: Optional[str] = None, - source_if_modified_since: Optional[datetime.datetime] = None, - source_if_unmodified_since: Optional[datetime.datetime] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Literal["AES256"] = "AES256", - owner: Optional[str] = None, - group: Optional[str] = None, - acl: Optional[str] = None, - proposed_lease_id: Optional[str] = None, - lease_duration: Optional[int] = None, - expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None, - expires_on: Optional[str] = None, - encryption_context: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if resource is not None: - _params["resource"] = _SERIALIZER.query("resource", resource, "str") - if continuation is not None: - _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") - if mode is not None: - _params["mode"] = _SERIALIZER.query("mode", mode, "str") - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - 
_headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if cache_control is not None: - _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") - if content_encoding is not None: - _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") - if content_language is not None: - _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") - if content_disposition is not None: - _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, "str") - if content_type_parameter is not None: - _headers["x-ms-content-type"] = _SERIALIZER.header("content_type_parameter", content_type_parameter, "str") - if rename_source is not None: - _headers["x-ms-rename-source"] = _SERIALIZER.header("rename_source", rename_source, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if source_lease_id is not None: - _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str") - if properties is not None: - _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") - if permissions is not None: - _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", permissions, "str") - if umask is not None: - _headers["x-ms-umask"] = _SERIALIZER.header("umask", umask, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if source_if_match is not 
None: - _headers["x-ms-source-if-match"] = _SERIALIZER.header("source_if_match", source_if_match, "str") - if source_if_none_match is not None: - _headers["x-ms-source-if-none-match"] = _SERIALIZER.header("source_if_none_match", source_if_none_match, "str") - if source_if_modified_since is not None: - _headers["x-ms-source-if-modified-since"] = _SERIALIZER.header( - "source_if_modified_since", source_if_modified_since, "rfc-1123" - ) - if source_if_unmodified_since is not None: - _headers["x-ms-source-if-unmodified-since"] = _SERIALIZER.header( - "source_if_unmodified_since", source_if_unmodified_since, "rfc-1123" - ) - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if owner is not None: - _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") - if group is not None: - _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") - if acl is not None: - _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if lease_duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") - if expiry_options is not None: - _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") - if expires_on is not None: - _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") - if encryption_context is not None: - _headers["x-ms-encryption-context"] = _SERIALIZER.header("encryption_context", encryption_context, "str") 
- _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_update_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches - url: str, - *, - action: Union[str, _models.PathUpdateAction], - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - content: IO[bytes], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - max_records: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - content_md5: Optional[bytes] = None, - lease_id: Optional[str] = None, - cache_control: Optional[str] = None, - content_type_parameter: Optional[str] = None, - content_disposition: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - properties: Optional[str] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - 
path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - _params["action"] = _SERIALIZER.query("action", action, "str") - if max_records is not None: - _params["maxRecords"] = _SERIALIZER.query("max_records", max_records, "int", minimum=1) - if continuation is not None: - _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") - _params["mode"] = _SERIALIZER.query("mode", mode, "str") - if force_flag is not None: - _params["forceFlag"] = _SERIALIZER.query("force_flag", force_flag, "bool") - if position is not None: - _params["position"] = _SERIALIZER.query("position", position, "int") - if retain_uncommitted_data is not None: - _params["retainUncommittedData"] = _SERIALIZER.query("retain_uncommitted_data", retain_uncommitted_data, "bool") - if close is not None: - _params["close"] = _SERIALIZER.query("close", close, "bool") - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if content_length is not None: - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int", minimum=0) - if content_md5 is not None: - _headers["x-ms-content-md5"] = _SERIALIZER.header("content_md5", content_md5, "bytearray") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if cache_control is not None: - _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") - if content_type_parameter is not None: - _headers["x-ms-content-type"] = _SERIALIZER.header("content_type_parameter", content_type_parameter, "str") - if 
content_disposition is not None: - _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, "str") - if content_encoding is not None: - _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") - if content_language is not None: - _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") - if properties is not None: - _headers["x-ms-properties"] = _SERIALIZER.header("properties", properties, "str") - if owner is not None: - _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") - if group is not None: - _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") - if permissions is not None: - _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", permissions, "str") - if acl is not None: - _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if structured_body_type is not None: - _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") - if structured_content_length is not None: - _headers["x-ms-structured-content-length"] = _SERIALIZER.header( - "structured_content_length", structured_content_length, "int" - ) - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, 
params=_params, headers=_headers, content=content, **kwargs) - - -def build_lease_request( - url: str, - *, - x_ms_lease_action: Union[str, _models.PathLeaseAction], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - x_ms_lease_break_period: Optional[int] = None, - lease_id: Optional[str] = None, - proposed_lease_id: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - x_ms_lease_duration: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - _headers["x-ms-lease-action"] = _SERIALIZER.header("x_ms_lease_action", x_ms_lease_action, "str") - if x_ms_lease_duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("x_ms_lease_duration", x_ms_lease_duration, "int") - if x_ms_lease_break_period is not None: - _headers["x-ms-lease-break-period"] = _SERIALIZER.header( - "x_ms_lease_break_period", x_ms_lease_break_period, "int" - ) - if lease_id is not None: - 
_headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_read_request( - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - lease_id: Optional[str] = None, - x_ms_range_get_content_md5: Optional[bool] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Literal["AES256"] = "AES256", - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # 
type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if range is not None: - _headers["Range"] = _SERIALIZER.header("range", range, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if x_ms_range_get_content_md5 is not None: - _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header( - "x_ms_range_get_content_md5", x_ms_range_get_content_md5, "bool" - ) - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_get_properties_request( - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: 
Optional[int] = None, - action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, - upn: Optional[bool] = None, - lease_id: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if action is not None: - _params["action"] = _SERIALIZER.query("action", action, "str") - if upn is not None: - _params["upn"] = _SERIALIZER.query("upn", upn, "bool") - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = 
_SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_delete_request( - url: str, - *, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - recursive: Optional[bool] = None, - continuation: Optional[str] = None, - lease_id: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - paginated: Optional[bool] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if recursive is not None: - _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") - if continuation is not None: - _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") - if paginated is not None: - _params["paginated"] = _SERIALIZER.query("paginated", paginated, "bool") - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if lease_id is not None: - 
_headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_access_control_request( - url: str, - *, - timeout: Optional[int] = None, - lease_id: Optional[str] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["action"] = _SERIALIZER.query("action", action, "str") - if timeout 
is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if owner is not None: - _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str") - if group is not None: - _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str") - if permissions is not None: - _headers["x-ms-permissions"] = _SERIALIZER.header("permissions", permissions, "str") - if acl is not None: - _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_set_access_control_recursive_request( # pylint: disable=name-too-long - url: str, - *, - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - timeout: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - max_records: Optional[int] = None, - acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControlRecursive"] = kwargs.pop( - "action", _params.pop("action", "setAccessControlRecursive") - ) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["action"] = _SERIALIZER.query("action", action, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if continuation is not None: - _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") - _params["mode"] = _SERIALIZER.query("mode", mode, "str") - if force_flag is not None: - _params["forceFlag"] = _SERIALIZER.query("force_flag", force_flag, "bool") - if max_records is not None: - _params["maxRecords"] = _SERIALIZER.query("max_records", max_records, "int", minimum=1) - - # Construct headers - if acl is not None: - _headers["x-ms-acl"] = _SERIALIZER.header("acl", acl, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_flush_data_request( # pylint: disable=too-many-locals - url: str, - *, - timeout: Optional[int] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - content_md5: Optional[bytes] = None, - lease_id: 
Optional[str] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - cache_control: Optional[str] = None, - content_type_parameter: Optional[str] = None, - content_disposition: Optional[str] = None, - content_encoding: Optional[str] = None, - content_language: Optional[str] = None, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - if_modified_since: Optional[datetime.datetime] = None, - if_unmodified_since: Optional[datetime.datetime] = None, - request_id_parameter: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Literal["AES256"] = "AES256", - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["action"] = _SERIALIZER.query("action", action, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if position is not None: - _params["position"] = _SERIALIZER.query("position", position, "int") - if retain_uncommitted_data is not None: - _params["retainUncommittedData"] = _SERIALIZER.query("retain_uncommitted_data", retain_uncommitted_data, "bool") - if close is not None: - _params["close"] = _SERIALIZER.query("close", close, "bool") - - # Construct headers - if content_length is not None: - 
_headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int", minimum=0) - if content_md5 is not None: - _headers["x-ms-content-md5"] = _SERIALIZER.header("content_md5", content_md5, "bytearray") - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if lease_action is not None: - _headers["x-ms-lease-action"] = _SERIALIZER.header("lease_action", lease_action, "str") - if lease_duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if cache_control is not None: - _headers["x-ms-cache-control"] = _SERIALIZER.header("cache_control", cache_control, "str") - if content_type_parameter is not None: - _headers["x-ms-content-type"] = _SERIALIZER.header("content_type_parameter", content_type_parameter, "str") - if content_disposition is not None: - _headers["x-ms-content-disposition"] = _SERIALIZER.header("content_disposition", content_disposition, "str") - if content_encoding is not None: - _headers["x-ms-content-encoding"] = _SERIALIZER.header("content_encoding", content_encoding, "str") - if content_language is not None: - _headers["x-ms-content-language"] = _SERIALIZER.header("content_language", content_language, "str") - if if_match is not None: - _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") - if if_none_match is not None: - _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") - if if_modified_since is not None: - _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") - if if_unmodified_since is not None: - _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") - if request_id_parameter is not None: - 
_headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if encryption_key is not None: - _headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_append_data_request( # pylint: disable=too-many-locals - url: str, - *, - content: IO[bytes], - position: Optional[int] = None, - timeout: Optional[int] = None, - content_length: Optional[int] = None, - transactional_content_hash: Optional[bytes] = None, - transactional_content_crc64: Optional[bytes] = None, - lease_id: Optional[str] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - encryption_key: Optional[str] = None, - encryption_key_sha256: Optional[str] = None, - encryption_algorithm: Literal["AES256"] = "AES256", - flush: Optional[bool] = None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = 
kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["action"] = _SERIALIZER.query("action", action, "str") - if position is not None: - _params["position"] = _SERIALIZER.query("position", position, "int") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - if flush is not None: - _params["flush"] = _SERIALIZER.query("flush", flush, "bool") - - # Construct headers - if content_length is not None: - _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int", minimum=0) - if transactional_content_hash is not None: - _headers["Content-MD5"] = _SERIALIZER.header( - "transactional_content_hash", transactional_content_hash, "bytearray" - ) - if transactional_content_crc64 is not None: - _headers["x-ms-content-crc64"] = _SERIALIZER.header( - "transactional_content_crc64", transactional_content_crc64, "bytearray" - ) - if lease_id is not None: - _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str") - if lease_action is not None: - _headers["x-ms-lease-action"] = _SERIALIZER.header("lease_action", lease_action, "str") - if lease_duration is not None: - _headers["x-ms-lease-duration"] = _SERIALIZER.header("lease_duration", lease_duration, "int") - if proposed_lease_id is not None: - _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if encryption_key is not None: - 
_headers["x-ms-encryption-key"] = _SERIALIZER.header("encryption_key", encryption_key, "str") - if encryption_key_sha256 is not None: - _headers["x-ms-encryption-key-sha256"] = _SERIALIZER.header( - "encryption_key_sha256", encryption_key_sha256, "str" - ) - if encryption_algorithm is not None: - _headers["x-ms-encryption-algorithm"] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, "str") - if structured_body_type is not None: - _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") - if structured_content_length is not None: - _headers["x-ms-structured-content-length"] = _SERIALIZER.header( - "structured_content_length", structured_content_length, "int" - ) - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, content=content, **kwargs) - - -def build_set_expiry_request( - url: str, - *, - expiry_options: Union[str, _models.PathExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - 
_params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-expiry-option"] = _SERIALIZER.header("expiry_options", expiry_options, "str") - if expires_on is not None: - _headers["x-ms-expiry-time"] = _SERIALIZER.header("expires_on", expires_on, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_undelete_request( - url: str, - *, - timeout: Optional[int] = None, - undelete_source: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["comp"] = _SERIALIZER.query("comp", comp, "str") - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if undelete_source is not None: - _headers["x-ms-undelete-source"] = _SERIALIZER.header("undelete_source", undelete_source, "str") - _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str") - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -class PathOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.filedatalake.AzureDataLakeStorageRESTAPI`'s - :attr:`path` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AzureDataLakeStorageRESTAPIConfiguration = ( - input_args.pop(0) if input_args else kwargs.pop("config") - ) - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def create( # pylint: disable=inconsistent-return-statements,too-many-locals - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - resource: Optional[Union[str, _models.PathResourceType]] = None, - continuation: Optional[str] = None, - mode: Optional[Union[str, _models.PathRenameMode]] = None, - rename_source: Optional[str] = None, - source_lease_id: Optional[str] = None, - properties: Optional[str] = None, - permissions: Optional[str] = None, - umask: Optional[str] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - acl: Optional[str] = None, - proposed_lease_id: Optional[str] = None, - lease_duration: Optional[int] = None, - expiry_options: Optional[Union[str, _models.PathExpiryOptions]] = None, - expires_on: Optional[str] = None, - encryption_context: Optional[str] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, 
- modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Create File | Create Directory | Rename File | Rename Directory. - - Create or rename a file or directory. By default, the destination is overwritten and if the - destination already exists and has a lease the lease is broken. This operation supports - conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob - Service Operations - `_. - To fail if the destination already exists, use a conditional request with If-None-Match: "*". - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param resource: Required only for Create File and Create Directory. The value must be "file" - or "directory". Known values are: "directory" and "file". Default value is None. - :type resource: str or ~azure.storage.filedatalake.models.PathResourceType - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param mode: Optional. Valid only when namespace is enabled. 
This parameter determines the - behavior of the rename operation. The value must be "legacy" or "posix", and the default value - will be "posix". Known values are: "legacy" and "posix". Default value is None. - :type mode: str or ~azure.storage.filedatalake.models.PathRenameMode - :param rename_source: An optional file or directory to be renamed. The value must have the - following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties - will overwrite the existing properties; otherwise, the existing properties will be preserved. - This value must be a URL percent-encoded string. Note that the string may only contain ASCII - characters in the ISO-8859-1 character set. Default value is None. - :type rename_source: str - :param source_lease_id: A lease ID for the source path. If specified, the source path must have - an active lease and the lease ID must match. Default value is None. - :type source_lease_id: str - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. Default value is - None. - :type properties: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 
0766) are supported. - Default value is None. - :type permissions: str - :param umask: Optional and only valid if Hierarchical Namespace is enabled for the account. - When creating a file or directory and the parent folder does not have a default ACL, the umask - restricts the permissions of the file or directory to be created. The resulting permission is - given by p bitwise and not u, where p is the permission and u is the umask. For example, if p - is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777 - for a directory and 0666 for a file. The default umask is 0027. The umask must be specified - in 4-digit octal notation (e.g. 0766). Default value is None. - :type umask: str - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param expiry_options: Required. Indicates mode of the expiry time. 
Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Default value is None. - :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions - :param expires_on: The time to set the blob to expiry. Default value is None. - :type expires_on: str - :param encryption_context: Specifies the encryption context to set on the file. Default value - is None. - :type encryption_context: str - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param source_modified_access_conditions: Parameter group. Default value is None. - :type source_modified_access_conditions: - ~azure.storage.filedatalake.models.SourceModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _cache_control = None - _content_encoding = None - _content_language = None - _content_disposition = None - _content_type_parameter = None - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _source_if_match = None - _source_if_none_match = None - _source_if_modified_since = None - _source_if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if source_modified_access_conditions is not None: - _source_if_match = source_modified_access_conditions.source_if_match - _source_if_modified_since = source_modified_access_conditions.source_if_modified_since - _source_if_none_match = 
source_modified_access_conditions.source_if_none_match - _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_create_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - resource=resource, - continuation=continuation, - mode=mode, - cache_control=_cache_control, - content_encoding=_content_encoding, - content_language=_content_language, - content_disposition=_content_disposition, - content_type_parameter=_content_type_parameter, - rename_source=rename_source, - lease_id=_lease_id, - source_lease_id=source_lease_id, - properties=properties, - permissions=permissions, - umask=umask, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - source_if_match=_source_if_match, - source_if_none_match=_source_if_none_match, - source_if_modified_since=_source_if_modified_since, - source_if_unmodified_since=_source_if_unmodified_since, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - owner=owner, - group=group, - acl=acl, - proposed_lease_id=proposed_lease_id, - lease_duration=lease_duration, - expiry_options=expiry_options, - expires_on=expires_on, - encryption_context=encryption_context, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [201]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def update( # pylint: disable=too-many-locals - self, - action: Union[str, _models.PathUpdateAction], - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - body: IO[bytes], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - max_records: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - properties: Optional[str] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: Optional[str] = 
None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> Optional[_models.SetAccessControlRecursiveResponse]: - """Append Data | Flush Data | Set Properties | Set Access Control. - - Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file, - sets properties for a file or directory, or sets access control for a file or directory. Data - can only be appended to a file. Concurrent writes to the same file using multiple clients are - not supported. This operation supports conditional HTTP requests. For more information, see - `Specifying Conditional Headers for Blob Service Operations - `_. - - :param action: The action must be "append" to upload data to be appended to a file, "flush" to - flush previously uploaded data to a file, "setProperties" to set the properties of a file or - directory, "setAccessControl" to set the owner, group, permissions, or access control list for - a file or directory, or "setAccessControlRecursive" to set the access control list for a - directory recursively. Note that Hierarchical Namespace must be enabled for the account in - order to use access control. Also note that the Access Control List (ACL) includes permissions - for the owner, owning group, and others, so the x-ms-permissions and x-ms-acl request headers - are mutually exclusive. Known values are: "append", "flush", "setProperties", - "setAccessControl", and "setAccessControlRecursive". Required. 
- :type action: str or ~azure.storage.filedatalake.models.PathUpdateAction - :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" - modifies one or more POSIX access control rights that pre-exist on files and directories, - "remove" removes one or more POSIX access control rights that were present earlier on files - and directories. Known values are: "set", "modify", and "remove". Required. - :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode - :param body: Initial data. Required. - :type body: IO[bytes] - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the - maximum number of files or directories on which the acl change will be applied. If omitted or - greater than 2,000, the request will process up to 2,000 items. Default value is None. - :type max_records: int - :param continuation: Optional. The number of paths processed with each invocation is limited. - If the number of paths to be processed exceeds this limit, a continuation token is returned in - the response header x-ms-continuation. When a continuation token is returned in the response, - it must be percent-encoded and specified in a subsequent invocation of - setAccessControlRecursive operation. Default value is None. - :type continuation: str - :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false, - the operation will terminate quickly on encountering user errors (4XX). 
If true, the operation - will ignore user errors and proceed with the operation on other sub-entities of the directory. - Continuation token will only be returned when forceFlag is true in case of user errors. If not - set the default value is false for this. Default value is None. - :type force_flag: bool - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. Uploaded data is not immediately flushed, or - written, to the file. To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data - is retained after the flush operation completes; otherwise, the uncommitted data is deleted - after the flush operation. The default is false. Data at offsets less than the specified - position are written to the file when flush succeeds, but this optional parameter allows data - after the flush position to be retained for a future flush operation. Default value is None. - :type retain_uncommitted_data: bool - :param close: Azure Storage Events allow applications to receive notifications when files - change. When Azure Storage Events are enabled, a file changed event is raised. This event has a - property indicating whether this is the final change to distinguish the difference between an - intermediate flush to a file stream and the final close of a file stream. The close query - parameter is valid only when the action is "flush" and change notifications are enabled. 
If the - value of close is "true" and the flush operation completes successfully, the service raises a - file change notification with a property indicating that this is the final update (the file - stream has been closed). If "false" a change notification is raised indicating the file has - changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to - indicate that the file stream has been closed.". Default value is None. - :type close: bool - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. - :type content_length: int - :param properties: Optional. User-defined properties to be stored with the filesystem, in the - format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value - is a base64 encoded string. Note that the string may only contain ASCII characters in the - ISO-8859-1 character set. If the filesystem exists, any properties not included in the list - will be removed. All properties are removed if the header is omitted. To merge new and - existing properties, first get all existing properties and the current E-Tag, then make a - conditional request with the E-Tag and include values for all properties. Default value is - None. - :type properties: str - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - Default value is None. 
- :type permissions: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param structured_body_type: Required if the request body is a structured message. Specifies - the message schema version and properties. Default value is None. - :type structured_body_type: str - :param structured_content_length: Required if the request body is a structured message. - Specifies the length of the blob/file content inside the message body. Will always be smaller - than Content-Length. Default value is None. - :type structured_content_length: int - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: SetAccessControlRecursiveResponse or None or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream")) - cls: ClsType[Optional[_models.SetAccessControlRecursiveResponse]] = kwargs.pop("cls", None) - - _content_md5 = None - _lease_id = None - _cache_control = None - _content_type_parameter = None - _content_disposition = None - _content_encoding = None - _content_language = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_md5 = path_http_headers.content_md5 - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - _content = body - - _request = build_update_request( - url=self._config.url, - action=action, - 
mode=mode, - request_id_parameter=request_id_parameter, - timeout=timeout, - max_records=max_records, - continuation=continuation, - force_flag=force_flag, - position=position, - retain_uncommitted_data=retain_uncommitted_data, - close=close, - content_length=content_length, - content_md5=_content_md5, - lease_id=_lease_id, - cache_control=_cache_control, - content_type_parameter=_content_type_parameter, - content_disposition=_content_disposition, - content_encoding=_content_encoding, - content_language=_content_language, - properties=properties, - owner=owner, - group=group, - permissions=permissions, - acl=acl, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - structured_body_type=structured_body_type, - structured_content_length=structured_content_length, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - deserialized = None - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Accept-Ranges"] = self._deserialize("str", 
response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) - - if response.status_code == 202: - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-structured-body"] = self._deserialize( - "str", response.headers.get("x-ms-structured-body") - ) - - if cls: - return 
cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def lease( # pylint: disable=inconsistent-return-statements - self, - x_ms_lease_action: Union[str, _models.PathLeaseAction], - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - x_ms_lease_break_period: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Lease Path. - - Create and manage a lease to restrict write and delete access to the path. This operation - supports conditional HTTP requests. For more information, see `Specifying Conditional Headers - for Blob Service Operations - `_. - - :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew", - and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration" - to acquire a new lease. Use "break" to break an existing lease. When a lease is broken, the - lease break period is allowed to elapse, during which time no lease operation except break and - release can be performed on the file. When a lease is successfully broken, the response - indicates the interval in seconds until a new lease can be acquired. Use "change" and specify - the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to - change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an - existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", "release", and "break". Required. 
- :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param x_ms_lease_break_period: The lease break period duration is optional to break a lease, - and specifies the break period of the lease in seconds. The lease break duration must be - between 0 and 60 seconds. Default value is None. - :type x_ms_lease_break_period: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_lease_request( - url=self._config.url, - x_ms_lease_action=x_ms_lease_action, - request_id_parameter=request_id_parameter, - timeout=timeout, - x_ms_lease_break_period=x_ms_lease_break_period, - lease_id=_lease_id, - proposed_lease_id=proposed_lease_id, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - x_ms_lease_duration=self._config.x_ms_lease_duration, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 201, 202]: - map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - - if response.status_code == 201: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id")) - - if response.status_code == 202: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-lease-time"] = self._deserialize("str", 
response.headers.get("x-ms-lease-time")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def read( - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - range: Optional[str] = None, - x_ms_range_get_content_md5: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> Iterator[bytes]: - """Read File. - - Read the contents of a file. For read operations, range requests are supported. This operation - supports conditional HTTP requests. For more information, see `Specifying Conditional Headers - for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param range: The HTTP Range request header specifies one or more byte ranges of the resource - to be retrieved. Default value is None. - :type range: str - :param x_ms_range_get_content_md5: Optional. When this header is set to "true" and specified - together with the Range header, the service returns the MD5 hash for the range, as long as the - range is less than or equal to 4MB in size. If this header is specified without the Range - header, the service returns status code 400 (Bad Request). If this header is set to true when - the range exceeds 4 MB in size, the service returns status code 400 (Bad Request). Default - value is None. - :type x_ms_range_get_content_md5: bool - :param lease_access_conditions: Parameter group. 
Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. - :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: Iterator[bytes] or the result of cls(response) - :rtype: Iterator[bytes] - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_read_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - range=range, - lease_id=_lease_id, - x_ms_range_get_content_md5=x_ms_range_get_content_md5, - if_match=_if_match, - if_none_match=_if_none_match, - 
if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _decompress = kwargs.pop("decompress", True) - _stream = True - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 206]: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", 
response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize( - "str", response.headers.get("x-ms-resource-type") - ) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - if response.status_code == 206: - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize( - "str", response.headers.get("Content-Disposition") - ) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", 
response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["x-ms-content-md5"] = self._deserialize("str", response.headers.get("x-ms-content-md5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize( - "str", response.headers.get("x-ms-resource-type") - ) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-lease-duration"] = self._deserialize( - "str", response.headers.get("x-ms-lease-duration") - ) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - - deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - 
@distributed_trace - def get_properties( # pylint: disable=inconsistent-return-statements - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, - upn: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Get Properties | Get Status | Get Access Control List. - - Get Properties returns all system and user defined properties for a path. Get Status returns - all system defined properties for a path. Get Access Control List returns the access control - list for a path. This operation supports conditional HTTP requests. For more information, see - `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param action: Optional. If the value is "getStatus" only the system defined properties for the - path are returned. If the value is "getAccessControl" the access control list is returned in - the response headers (Hierarchical Namespace must be enabled for the account), otherwise the - properties are returned. Known values are: "getAccessControl" and "getStatus". Default value is - None. - :type action: str or ~azure.storage.filedatalake.models.PathGetPropertiesAction - :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. 
If - "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response - headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If - "false", the values will be returned as Azure Active Directory Object IDs. The default value is - false. Note that group and application Object IDs are not translated because they do not have - unique friendly names. Default value is None. - :type upn: bool - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_get_properties_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - action=action, - upn=upn, - lease_id=_lease_id, - if_match=_if_match, - 
if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges")) - response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control")) - response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition")) - response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding")) - response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range")) - response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type")) - response_headers["Content-MD5"] = self._deserialize("str", response.headers.get("Content-MD5")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - 
response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) - response_headers["x-ms-properties"] = self._deserialize("str", response.headers.get("x-ms-properties")) - response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner")) - response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group")) - response_headers["x-ms-permissions"] = self._deserialize("str", response.headers.get("x-ms-permissions")) - response_headers["x-ms-acl"] = self._deserialize("str", response.headers.get("x-ms-acl")) - response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) - response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) - response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) - response_headers["x-ms-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-encryption-context"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-context") - ) - response_headers["x-ms-encryption-scope"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-scope") - ) - response_headers["x-ms-creation-time"] = self._deserialize( - "rfc-1123", response.headers.get("x-ms-creation-time") - ) - response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace 
- def delete( # pylint: disable=inconsistent-return-statements - self, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - recursive: Optional[bool] = None, - continuation: Optional[str] = None, - paginated: Optional[bool] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - **kwargs: Any - ) -> None: - """Delete File | Delete Directory. - - Delete the file or directory. This operation supports conditional HTTP requests. For more - information, see `Specifying Conditional Headers for Blob Service Operations - `_. - - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param recursive: Required. Default value is None. - :type recursive: bool - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param paginated: If true, paginated behavior will be seen. Pagination is for the recursive ACL - checks as a POSIX requirement in the server and Delete in an atomic operation once the ACL - checks are completed. If false or missing, normal default behavior will kick in, which may - timeout in case of very large directories due to recursive ACL checks. 
This new parameter is - introduced for backward compatibility. Default value is None. - :type paginated: bool - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_delete_request( - url=self._config.url, - request_id_parameter=request_id_parameter, - timeout=timeout, - recursive=recursive, - continuation=continuation, - lease_id=_lease_id, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - paginated=paginated, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - if response.status_code == 200: - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - response_headers["x-ms-deletion-id"] = self._deserialize("str", response.headers.get("x-ms-deletion-id")) - - if response.status_code == 202: - response_headers["Date"] = self._deserialize("str", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-continuation"] = self._deserialize("str", response.headers.get("x-ms-continuation")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_access_control( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - owner: Optional[str] = None, - group: Optional[str] = None, - permissions: Optional[str] = None, - acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = 
None, - **kwargs: Any - ) -> None: - """Set the owner, group, permissions, or access control list for a path. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param owner: Optional. The owner of the blob or directory. Default value is None. - :type owner: str - :param group: Optional. The owning group of the blob or directory. Default value is None. - :type group: str - :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the - account. Sets POSIX access permissions for the file owner, the file owning group, and others. - Each class may be granted read, write, or execute permission. The sticky bit is also - supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. - Default value is None. - :type permissions: str - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. 
- :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _lease_id = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - - _request = build_set_access_control_request( - url=self._config.url, - timeout=timeout, - lease_id=_lease_id, - owner=owner, - group=group, - permissions=permissions, - acl=acl, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_access_control_recursive( - self, - mode: Union[str, _models.PathSetAccessControlRecursiveMode], - timeout: Optional[int] = None, - continuation: Optional[str] = None, - force_flag: Optional[bool] = None, - max_records: Optional[int] = None, - acl: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> _models.SetAccessControlRecursiveResponse: - """Set the access control list for a path and sub-paths. - - :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" - modifies one or more POSIX access control rights that pre-exist on files and directories, - "remove" removes one or more POSIX access control rights that were present earlier on files - and directories. Known values are: "set", "modify", and "remove". Required. - :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode - :param timeout: The timeout parameter is expressed in seconds. 
For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. - :type continuation: str - :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false, - the operation will terminate quickly on encountering user errors (4XX). If true, the operation - will ignore user errors and proceed with the operation on other sub-entities of the directory. - Continuation token will only be returned when forceFlag is true in case of user errors. If not - set the default value is false for this. Default value is None. - :type force_flag: bool - :param max_records: Optional. It specifies the maximum number of files or directories on which - the acl change will be applied. If omitted or greater than 2,000, the request will process up - to 2,000 items. Default value is None. - :type max_records: int - :param acl: Sets POSIX access control rights on files and directories. The value is a - comma-separated list of access control entries. Each access control entry (ACE) consists of a - scope, a type, a user or group identifier, and permissions in the format - "[scope:][type]:[id]:[permissions]". Default value is None. - :type acl: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: SetAccessControlRecursiveResponse or the result of cls(response) - :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["setAccessControlRecursive"] = kwargs.pop( - "action", _params.pop("action", "setAccessControlRecursive") - ) - cls: ClsType[_models.SetAccessControlRecursiveResponse] = kwargs.pop("cls", None) - - _request = build_set_access_control_recursive_request( - url=self._config.url, - mode=mode, - timeout=timeout, - continuation=continuation, - force_flag=force_flag, - max_records=max_records, - acl=acl, - request_id_parameter=request_id_parameter, - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-continuation"] = self._deserialize("str", 
response.headers.get("x-ms-continuation")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - - deserialized = self._deserialize("SetAccessControlRecursiveResponse", pipeline_response.http_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - - return deserialized # type: ignore - - @distributed_trace - def flush_data( # pylint: disable=inconsistent-return-statements,too-many-locals - self, - timeout: Optional[int] = None, - position: Optional[int] = None, - retain_uncommitted_data: Optional[bool] = None, - close: Optional[bool] = None, - content_length: Optional[int] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Set the owner, group, permissions, or access control list for a path. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. Uploaded data is not immediately flushed, or - written, to the file. 
To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data - is retained after the flush operation completes; otherwise, the uncommitted data is deleted - after the flush operation. The default is false. Data at offsets less than the specified - position are written to the file when flush succeeds, but this optional parameter allows data - after the flush position to be retained for a future flush operation. Default value is None. - :type retain_uncommitted_data: bool - :param close: Azure Storage Events allow applications to receive notifications when files - change. When Azure Storage Events are enabled, a file changed event is raised. This event has a - property indicating whether this is the final change to distinguish the difference between an - intermediate flush to a file stream and the final close of a file stream. The close query - parameter is valid only when the action is "flush" and change notifications are enabled. If the - value of close is "true" and the flush operation completes successfully, the service raises a - file change notification with a property indicating that this is the final update (the file - stream has been closed). If "false" a change notification is raised indicating the file has - changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to - indicate that the file stream has been closed.". Default value is None. - :type close: bool - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. 
- :type content_length: int - :param lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it will - renew the lease. If "release" it will release the lease only on flush. If "acquire-release" it - will acquire & complete the operation & release the lease once operation is done. Known values - are: "acquire", "auto-renew", "release", and "acquire-release". Default value is None. - :type lease_action: str or ~azure.storage.filedatalake.models.LeaseAction - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. - :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param modified_access_conditions: Parameter group. Default value is None. - :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _content_md5 = None - _lease_id = None - _cache_control = None - _content_type_parameter = None - _content_disposition = None - _content_encoding = None - _content_language = None - _if_match = None - _if_none_match = None - _if_modified_since = None - _if_unmodified_since = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _cache_control = path_http_headers.cache_control - _content_disposition = path_http_headers.content_disposition - _content_encoding = path_http_headers.content_encoding - _content_language = path_http_headers.content_language - _content_md5 = path_http_headers.content_md5 - _content_type_parameter = path_http_headers.content_type - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if modified_access_conditions is not None: - _if_match = modified_access_conditions.if_match - _if_modified_since = modified_access_conditions.if_modified_since - _if_none_match = modified_access_conditions.if_none_match - _if_unmodified_since = modified_access_conditions.if_unmodified_since - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - - _request = build_flush_data_request( - 
url=self._config.url, - timeout=timeout, - position=position, - retain_uncommitted_data=retain_uncommitted_data, - close=close, - content_length=content_length, - content_md5=_content_md5, - lease_id=_lease_id, - lease_action=lease_action, - lease_duration=lease_duration, - proposed_lease_id=proposed_lease_id, - cache_control=_cache_control, - content_type_parameter=_content_type_parameter, - content_disposition=_content_disposition, - content_encoding=_content_encoding, - content_language=_content_language, - if_match=_if_match, - if_none_match=_if_none_match, - if_modified_since=_if_modified_since, - if_unmodified_since=_if_unmodified_since, - request_id_parameter=request_id_parameter, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - action=action, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", 
response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def append_data( # pylint: disable=inconsistent-return-statements,too-many-locals - self, - body: IO[bytes], - position: Optional[int] = None, - timeout: Optional[int] = None, - content_length: Optional[int] = None, - transactional_content_crc64: Optional[bytes] = None, - lease_action: Optional[Union[str, _models.LeaseAction]] = None, - lease_duration: Optional[int] = None, - proposed_lease_id: Optional[str] = None, - request_id_parameter: Optional[str] = None, - flush: Optional[bool] = None, - structured_body_type: Optional[str] = None, - structured_content_length: Optional[int] = None, - path_http_headers: Optional[_models.PathHTTPHeaders] = None, - lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, - cpk_info: Optional[_models.CpkInfo] = None, - **kwargs: Any - ) -> None: - """Append data to the file. - - :param body: Initial data. Required. - :type body: IO[bytes] - :param position: This parameter allows the caller to upload data in parallel and control the - order in which it is appended to the file. It is required when uploading data to be appended - to the file and when flushing previously uploaded data to the file. The value must be the - position where the data is to be appended. 
Uploaded data is not immediately flushed, or - written, to the file. To flush, the previously uploaded data must be contiguous, the position - parameter must be specified and equal to the length of the file after all data has been - written, and there must not be a request entity body included with the request. Default value - is None. - :type position: int - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush - Data". Must be the length of the request content in bytes for "Append Data". Default value is - None. - :type content_length: int - :param transactional_content_crc64: Specify the transactional crc64 for the body, to be - validated by the service. Default value is None. - :type transactional_content_crc64: bytes - :param lease_action: Optional. If "acquire" it will acquire the lease. If "auto-renew" it will - renew the lease. If "release" it will release the lease only on flush. If "acquire-release" it - will acquire & complete the operation & release the lease once operation is done. Known values - are: "acquire", "auto-renew", "release", and "acquire-release". Default value is None. - :type lease_action: str or ~azure.storage.filedatalake.models.LeaseAction - :param lease_duration: The lease duration is required to acquire a lease, and specifies the - duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 - for infinite lease. Default value is None. - :type lease_duration: int - :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns - 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid - Constructor (String) for a list of valid GUID string formats. Default value is None. 
- :type proposed_lease_id: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param flush: If file should be flushed after the append. Default value is None. - :type flush: bool - :param structured_body_type: Required if the request body is a structured message. Specifies - the message schema version and properties. Default value is None. - :type structured_body_type: str - :param structured_content_length: Required if the request body is a structured message. - Specifies the length of the blob/file content inside the message body. Will always be smaller - than Content-Length. Default value is None. - :type structured_content_length: int - :param path_http_headers: Parameter group. Default value is None. - :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders - :param lease_access_conditions: Parameter group. Default value is None. - :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions - :param cpk_info: Parameter group. Default value is None. 
- :type cpk_info: ~azure.storage.filedatalake.models.CpkInfo - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append")) - content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _transactional_content_hash = None - _lease_id = None - _encryption_key = None - _encryption_key_sha256 = None - _encryption_algorithm = None - if path_http_headers is not None: - _transactional_content_hash = path_http_headers.transactional_content_hash - if lease_access_conditions is not None: - _lease_id = lease_access_conditions.lease_id - if cpk_info is not None: - _encryption_algorithm = cpk_info.encryption_algorithm - _encryption_key = cpk_info.encryption_key - _encryption_key_sha256 = cpk_info.encryption_key_sha256 - _content = body - - _request = build_append_data_request( - url=self._config.url, - position=position, - timeout=timeout, - content_length=content_length, - transactional_content_hash=_transactional_content_hash, - transactional_content_crc64=transactional_content_crc64, - lease_id=_lease_id, - lease_action=lease_action, - lease_duration=lease_duration, - proposed_lease_id=proposed_lease_id, - request_id_parameter=request_id_parameter, - encryption_key=_encryption_key, - encryption_key_sha256=_encryption_key_sha256, - encryption_algorithm=_encryption_algorithm, # type: ignore - flush=flush, - structured_body_type=structured_body_type, - structured_content_length=structured_content_length, - 
action=action, - content_type=content_type, - version=self._config.version, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) - response_headers["x-ms-content-crc64"] = self._deserialize( - "bytearray", response.headers.get("x-ms-content-crc64") - ) - response_headers["x-ms-request-server-encrypted"] = self._deserialize( - "bool", response.headers.get("x-ms-request-server-encrypted") - ) - response_headers["x-ms-encryption-key-sha256"] = self._deserialize( - "str", response.headers.get("x-ms-encryption-key-sha256") - ) - response_headers["x-ms-lease-renewed"] = self._deserialize("bool", response.headers.get("x-ms-lease-renewed")) - response_headers["x-ms-structured-body"] = self._deserialize( - "str", response.headers.get("x-ms-structured-body") - ) - - if cls: - return 
cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def set_expiry( # pylint: disable=inconsistent-return-statements - self, - expiry_options: Union[str, _models.PathExpiryOptions], - timeout: Optional[int] = None, - request_id_parameter: Optional[str] = None, - expires_on: Optional[str] = None, - **kwargs: Any - ) -> None: - """Sets the time a blob will expire and be deleted. - - :param expiry_options: Required. Indicates mode of the expiry time. Known values are: - "NeverExpire", "RelativeToCreation", "RelativeToNow", and "Absolute". Required. - :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param expires_on: The time to set the blob to expiry. Default value is None. 
- :type expires_on: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_set_expiry_request( - url=self._config.url, - expiry_options=expiry_options, - timeout=timeout, - request_id_parameter=request_id_parameter, - expires_on=expires_on, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] 
= self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return cls(pipeline_response, None, response_headers) # type: ignore - - @distributed_trace - def undelete( # pylint: disable=inconsistent-return-statements - self, - timeout: Optional[int] = None, - undelete_source: Optional[str] = None, - request_id_parameter: Optional[str] = None, - **kwargs: Any - ) -> None: - """Undelete a path that was previously soft deleted. - - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :param undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path of - the soft deleted blob to undelete. Default value is None. - :type undelete_source: str - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. 
- :type request_id_parameter: str - :return: None or the result of cls(response) - :rtype: None - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - cls: ClsType[None] = kwargs.pop("cls", None) - - _request = build_undelete_request( - url=self._config.url, - timeout=timeout, - undelete_source=undelete_source, - request_id_parameter=request_id_parameter, - comp=comp, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - response_headers = {} - response_headers["x-ms-client-request-id"] = self._deserialize( - "str", response.headers.get("x-ms-client-request-id") - ) - response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id")) - response_headers["x-ms-resource-type"] = self._deserialize("str", response.headers.get("x-ms-resource-type")) - response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version")) - response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date")) - - if cls: - return 
cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py deleted file mode 100644 index 49f604f39e94..000000000000 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py +++ /dev/null @@ -1,207 +0,0 @@ -# pylint: disable=line-too-long,useless-suppression -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from collections.abc import MutableMapping -from typing import Any, Callable, Literal, Optional, TypeVar - -from azure.core import PipelineClient -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .. 
import models as _models -from .._configuration import AzureDataLakeStorageRESTAPIConfiguration -from .._utils.serialization import Deserializer, Serializer - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_file_systems_request( - url: str, - *, - prefix: Optional[str] = None, - continuation: Optional[str] = None, - max_results: Optional[int] = None, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "{url}") - path_format_arguments = { - "url": _SERIALIZER.url("url", url, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["resource"] = _SERIALIZER.query("resource", resource, "str") - if prefix is not None: - _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str") - if continuation is not None: - _params["continuation"] = _SERIALIZER.query("continuation", continuation, "str") - if max_results is not None: - _params["maxResults"] = _SERIALIZER.query("max_results", max_results, "int", minimum=1) - if timeout is not None: - _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0) - - # Construct headers - if request_id_parameter is not None: - _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str") - _headers["x-ms-version"] = 
_SERIALIZER.header("version", version, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class ServiceOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.storage.filedatalake.AzureDataLakeStorageRESTAPI`'s - :attr:`service` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config: AzureDataLakeStorageRESTAPIConfiguration = ( - input_args.pop(0) if input_args else kwargs.pop("config") - ) - self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list_file_systems( - self, - prefix: Optional[str] = None, - continuation: Optional[str] = None, - max_results: Optional[int] = None, - request_id_parameter: Optional[str] = None, - timeout: Optional[int] = None, - **kwargs: Any - ) -> ItemPaged["_models.FileSystem"]: - """List FileSystems. - - List filesystems and their properties in given account. - - :param prefix: Filters results to filesystems within the specified prefix. Default value is - None. - :type prefix: str - :param continuation: Optional. When deleting a directory, the number of paths that are deleted - with each invocation is limited. If the number of paths to be deleted exceeds this limit, a - continuation token is returned in this response header. When a continuation token is returned - in the response, it must be specified in a subsequent invocation of the delete operation to - continue deleting the directory. Default value is None. 
- :type continuation: str - :param max_results: An optional value that specifies the maximum number of items to return. If - omitted or greater than 5,000, the response will include up to 5,000 items. Default value is - None. - :type max_results: int - :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character - limit that is recorded in the analytics logs when storage analytics logging is enabled. Default - value is None. - :type request_id_parameter: str - :param timeout: The timeout parameter is expressed in seconds. For more information, see - :code:`Setting - Timeouts for Blob Service Operations.`. Default value is None. - :type timeout: int - :return: An iterator like instance of either FileSystem or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.storage.filedatalake.models.FileSystem] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account")) - cls: ClsType[_models.FileSystemList] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_file_systems_request( - url=self._config.url, - prefix=prefix, - continuation=continuation, - max_results=max_results, - request_id_parameter=request_id_parameter, - timeout=timeout, - resource=resource, - version=self._config.version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - _request = HttpRequest("GET", next_link) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def 
extract_data(pipeline_response): - deserialized = self._deserialize("FileSystemList", pipeline_response) - list_of_elem = deserialized.filesystems - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize( - _models.StorageError, - pipeline_response, - ) - raise HttpResponseError(response=response, model=error) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py index d2c0ba471f42..d0b01b3655d7 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_list_paths_helper.py @@ -4,10 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- -from typing import ( - Any, Callable, cast, Dict, - List, Optional, Tuple, Union -) +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union from azure.core.paging import PageIterator from azure.core.exceptions import HttpResponseError @@ -15,13 +12,9 @@ from ._deserialize import ( get_deleted_path_properties_from_generated_code, process_storage_error, - return_headers_and_deserialized_path_list -) -from ._generated.models import ( - BlobItemInternal, - BlobPrefix as GenBlobPrefix, - Path + return_headers_and_deserialized_path_list, ) +from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, Path from ._models import DeletedPathProperties, PathProperties from ._shared.models import DictMixin from ._shared.response_handlers import return_context_and_deserialized @@ -43,11 +36,11 @@ class DirectoryPrefix(DictMixin): options include "primary" and "secondary".""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('prefix') # type: ignore [assignment] - self.results_per_page = kwargs.get('results_per_page') # type: ignore [assignment] - self.file_system = kwargs.get('container') # type: ignore [assignment] - self.delimiter = kwargs.get('delimiter') # type: ignore [assignment] - self.location_mode = kwargs.get('location_mode') # type: ignore [assignment] + self.name = kwargs.get("prefix") # type: ignore [assignment] + self.results_per_page = kwargs.get("results_per_page") # type: ignore [assignment] + self.file_system = kwargs.get("container") # type: ignore [assignment] + self.delimiter = kwargs.get("delimiter") # type: ignore [assignment] + self.location_mode = kwargs.get("location_mode") # type: ignore [assignment] class DeletedPathPropertiesPaged(PageIterator): @@ -74,18 +67,17 @@ class DeletedPathPropertiesPaged(PageIterator): options include "primary" and "secondary".""" def __init__( - self, command: Callable, + self, + command: 
Callable, container: Optional[str] = None, prefix: Optional[str] = None, results_per_page: Optional[int] = None, continuation_token: Optional[str] = None, delimiter: Optional[str] = None, - location_mode: Optional[str] = None + location_mode: Optional[str] = None, ) -> None: super(DeletedPathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" + get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or "" ) self._command = command self.service_endpoint = None @@ -104,7 +96,7 @@ def _get_next_cb(self, continuation_token): marker=continuation_token or None, max_results=self.results_per_page, cls=return_context_and_deserialized, - use_location=self.location_mode + use_location=self.location_mode, ) except HttpResponseError as error: process_storage_error(error) @@ -134,7 +126,7 @@ def _build_item( container=self.container, prefix=item.name, results_per_page=self.results_per_page, - location_mode=self.location_mode + location_mode=self.location_mode, ) return item @@ -157,17 +149,16 @@ class PathPropertiesPaged(PageIterator): """The path list to build the items for the current page.""" def __init__( - self, command: Callable, + self, + command: Callable, recursive: bool, path: Optional[str] = None, max_results: Optional[int] = None, continuation_token: Optional[str] = None, - upn: Optional[str] = None + upn: Optional[str] = None, ) -> None: super(PathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" + get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or "" ) self._command = command self.recursive = recursive @@ -180,12 +171,12 @@ def __init__( def _get_next_cb(self, continuation_token): try: return self._command( - self.recursive, + recursive=self.recursive, 
continuation=continuation_token or None, path=self.path, max_results=self.results_per_page, upn=self.upn, - cls=return_headers_and_deserialized_path_list + cls=return_headers_and_deserialized_path_list, ) except HttpResponseError as error: process_storage_error(error) @@ -194,7 +185,7 @@ def _extract_data_cb(self, get_next_return): self.path_list, self._response = cast(Tuple[List[Path], Dict[str, Any]], get_next_return) self.current_page = [self._build_item(item) for item in self.path_list] - return self._response['continuation'] or None, self.current_page + return self._response["continuation"] or None, self.current_page @staticmethod def _build_item(item: Union[Path, PathProperties]) -> PathProperties: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index ee3ee10f5080..d48c83719d43 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for @@ -6,10 +7,7 @@ # pylint: disable=too-few-public-methods, too-many-instance-attributes, super-init-not-called, too-many-lines from enum import Enum -from typing import ( - Any, Dict, List, Optional, Union, - TYPE_CHECKING -) +from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core import CaseInsensitiveEnumMeta @@ -29,7 +27,7 @@ Logging as GenLogging, Metrics as GenMetrics, RetentionPolicy as GenRetentionPolicy, - StaticWebsite as GenStaticWebsite + StaticWebsite as GenStaticWebsite, ) from azure.storage.blob._models import ContainerPropertiesPaged @@ -91,7 +89,7 @@ class Metrics(GenMetrics): policy will be disabled by default. """ - version: str = '1.0' + version: str = "1.0" """The version of Storage Analytics to configure.""" enabled: bool = False """Indicates whether metrics are enabled for the Datalake service.""" @@ -101,10 +99,10 @@ class Metrics(GenMetrics): """Determines how long the associated data should persist.""" def __init__(self, **kwargs: Any) -> None: - self.version = kwargs.get('version', '1.0') - self.enabled = kwargs.get('enabled', False) - self.include_apis = kwargs.get('include_apis') - self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + self.version = kwargs.get("version", "1.0") + self.enabled = kwargs.get("enabled", False) + self.include_apis = kwargs.get("include_apis") + self.retention_policy = kwargs.get("retention_policy") or RetentionPolicy() @classmethod def _from_generated(cls, generated): @@ -114,7 +112,9 @@ def _from_generated(cls, generated): version=generated.version, enabled=generated.enabled, include_apis=generated.include_apis, - retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + retention_policy=RetentionPolicy._from_generated( + generated.retention_policy + ), # pylint: disable=protected-access ) @@ -164,11 +164,11 @@ class 
CorsRule(GenCorsRule): """The number of seconds that the client/browser should cache a pre-flight response.""" def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwargs: Any) -> None: - self.allowed_origins = ','.join(allowed_origins) - self.allowed_methods = ','.join(allowed_methods) - self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) - self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) - self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) + self.allowed_origins = ",".join(allowed_origins) + self.allowed_methods = ",".join(allowed_methods) + self.allowed_headers = ",".join(kwargs.get("allowed_headers", [])) + self.exposed_headers = ",".join(kwargs.get("exposed_headers", [])) + self.max_age_in_seconds = kwargs.get("max_age_in_seconds", 0) @staticmethod def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GenCorsRule]]: @@ -224,16 +224,14 @@ class AccountSasPermissions(BlobAccountSasPermissions): """ def __init__( - self, read: bool = False, + self, + read: bool = False, write: bool = False, delete: bool = False, list: bool = False, # pylint: disable=redefined-builtin - create: bool = False + create: bool = False, ) -> None: - super(AccountSasPermissions, self).__init__( - read=read, create=create, write=write, list=list, - delete=delete - ) + super(AccountSasPermissions, self).__init__(read=read, create=create, write=write, list=list, delete=delete) class FileSystemSasPermissions: @@ -292,7 +290,8 @@ class FileSystemSasPermissions: """Allows the user to set permissions and POSIX ACLs on files and directories.""" def __init__( - self, read: bool = False, + self, + read: bool = False, write: bool = False, delete: bool = False, list: bool = False, # pylint: disable=redefined-builtin @@ -302,22 +301,24 @@ def __init__( self.write = write self.delete = delete self.list = list - self.add = kwargs.pop('add', None) - self.create = kwargs.pop('create', None) - self.move = 
kwargs.pop('move', None) - self.execute = kwargs.pop('execute', None) - self.manage_ownership = kwargs.pop('manage_ownership', None) - self.manage_access_control = kwargs.pop('manage_access_control', None) - self._str = (('r' if self.read else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('l' if self.list else '') + - ('m' if self.move else '') + - ('e' if self.execute else '') + - ('o' if self.manage_ownership else '') + - ('p' if self.manage_access_control else '')) + self.add = kwargs.pop("add", None) + self.create = kwargs.pop("create", None) + self.move = kwargs.pop("move", None) + self.execute = kwargs.pop("execute", None) + self.manage_ownership = kwargs.pop("manage_ownership", None) + self.manage_access_control = kwargs.pop("manage_access_control", None) + self._str = ( + ("r" if self.read else "") + + ("a" if self.add else "") + + ("c" if self.create else "") + + ("w" if self.write else "") + + ("d" if self.delete else "") + + ("l" if self.list else "") + + ("m" if self.move else "") + + ("e" if self.execute else "") + + ("o" if self.manage_ownership else "") + + ("p" if self.manage_access_control else "") + ) def __str__(self): return self._str @@ -335,21 +336,29 @@ def from_string(cls, permission: str) -> Self: :return: A FileSystemSasPermissions object :rtype: ~azure.storage.filedatalake.FileSystemSasPermissions """ - p_read = 'r' in permission - p_add = 'a' in permission - p_create = 'c' in permission - p_write = 'w' in permission - p_delete = 'd' in permission - p_list = 'l' in permission - p_move = 'm' in permission - p_execute = 'e' in permission - p_manage_ownership = 'o' in permission - p_manage_access_control = 'p' in permission - - parsed = cls(read=p_read, write=p_write, delete=p_delete, - list=p_list, add=p_add, create=p_create, move=p_move, - execute=p_execute, manage_ownership=p_manage_ownership, - manage_access_control=p_manage_access_control) + 
p_read = "r" in permission + p_add = "a" in permission + p_create = "c" in permission + p_write = "w" in permission + p_delete = "d" in permission + p_list = "l" in permission + p_move = "m" in permission + p_execute = "e" in permission + p_manage_ownership = "o" in permission + p_manage_access_control = "p" in permission + + parsed = cls( + read=p_read, + write=p_write, + delete=p_delete, + list=p_list, + add=p_add, + create=p_create, + move=p_move, + execute=p_execute, + manage_ownership=p_manage_ownership, + manage_access_control=p_manage_access_control, + ) return parsed @@ -409,32 +418,30 @@ class DirectorySasPermissions: """Allows the user to set permissions and POSIX ACLs on files and directories.""" def __init__( - self, read: bool = False, - create: bool = False, - write: bool = False, - delete: bool = False, - **kwargs: Any + self, read: bool = False, create: bool = False, write: bool = False, delete: bool = False, **kwargs: Any ) -> None: self.read = read self.create = create self.write = write self.delete = delete - self.add = kwargs.pop('add', None) - self.list = kwargs.pop('list', None) - self.move = kwargs.pop('move', None) - self.execute = kwargs.pop('execute', None) - self.manage_ownership = kwargs.pop('manage_ownership', None) - self.manage_access_control = kwargs.pop('manage_access_control', None) - self._str = (('r' if self.read else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('l' if self.list else '') + - ('m' if self.move else '') + - ('e' if self.execute else '') + - ('o' if self.manage_ownership else '') + - ('p' if self.manage_access_control else '')) + self.add = kwargs.pop("add", None) + self.list = kwargs.pop("list", None) + self.move = kwargs.pop("move", None) + self.execute = kwargs.pop("execute", None) + self.manage_ownership = kwargs.pop("manage_ownership", None) + self.manage_access_control = kwargs.pop("manage_access_control", None) + 
self._str = ( + ("r" if self.read else "") + + ("a" if self.add else "") + + ("c" if self.create else "") + + ("w" if self.write else "") + + ("d" if self.delete else "") + + ("l" if self.list else "") + + ("m" if self.move else "") + + ("e" if self.execute else "") + + ("o" if self.manage_ownership else "") + + ("p" if self.manage_access_control else "") + ) def __str__(self): return self._str @@ -452,20 +459,29 @@ def from_string(cls, permission: str) -> Self: :return: A DirectorySasPermissions object :rtype: ~azure.storage.filedatalake.DirectorySasPermissions """ - p_read = 'r' in permission - p_add = 'a' in permission - p_create = 'c' in permission - p_write = 'w' in permission - p_delete = 'd' in permission - p_list = 'l' in permission - p_move = 'm' in permission - p_execute = 'e' in permission - p_manage_ownership = 'o' in permission - p_manage_access_control = 'p' in permission - - parsed = cls(read=p_read, create=p_create, write=p_write, delete=p_delete, add=p_add, - list=p_list, move=p_move, execute=p_execute, manage_ownership=p_manage_ownership, - manage_access_control=p_manage_access_control) + p_read = "r" in permission + p_add = "a" in permission + p_create = "c" in permission + p_write = "w" in permission + p_delete = "d" in permission + p_list = "l" in permission + p_move = "m" in permission + p_execute = "e" in permission + p_manage_ownership = "o" in permission + p_manage_access_control = "p" in permission + + parsed = cls( + read=p_read, + create=p_create, + write=p_write, + delete=p_delete, + add=p_add, + list=p_list, + move=p_move, + execute=p_execute, + manage_ownership=p_manage_ownership, + manage_access_control=p_manage_access_control, + ) return parsed @@ -521,30 +537,28 @@ class FileSasPermissions: """Allows the user to set permissions and POSIX ACLs on files and directories.""" def __init__( - self, read: bool = False, - create: bool = False, - write: bool = False, - delete: bool = False, - **kwargs: Any + self, read: bool = False, 
create: bool = False, write: bool = False, delete: bool = False, **kwargs: Any ) -> None: self.read = read self.create = create self.write = write self.delete = delete - self.add = kwargs.pop('add', None) - self.move = kwargs.pop('move', None) - self.execute = kwargs.pop('execute', None) - self.manage_ownership = kwargs.pop('manage_ownership', None) - self.manage_access_control = kwargs.pop('manage_access_control', None) - self._str = (('r' if self.read else '') + - ('a' if self.add else '') + - ('c' if self.create else '') + - ('w' if self.write else '') + - ('d' if self.delete else '') + - ('m' if self.move else '') + - ('e' if self.execute else '') + - ('o' if self.manage_ownership else '') + - ('p' if self.manage_access_control else '')) + self.add = kwargs.pop("add", None) + self.move = kwargs.pop("move", None) + self.execute = kwargs.pop("execute", None) + self.manage_ownership = kwargs.pop("manage_ownership", None) + self.manage_access_control = kwargs.pop("manage_access_control", None) + self._str = ( + ("r" if self.read else "") + + ("a" if self.add else "") + + ("c" if self.create else "") + + ("w" if self.write else "") + + ("d" if self.delete else "") + + ("m" if self.move else "") + + ("e" if self.execute else "") + + ("o" if self.manage_ownership else "") + + ("p" if self.manage_access_control else "") + ) def __str__(self): return self._str @@ -562,19 +576,27 @@ def from_string(cls, permission: str) -> Self: :return: A FileSasPermissions object :rtype: ~azure.storage.filedatalake.FileSasPermissions """ - p_read = 'r' in permission - p_add = 'a' in permission - p_create = 'c' in permission - p_write = 'w' in permission - p_delete = 'd' in permission - p_move = 'm' in permission - p_execute = 'e' in permission - p_manage_ownership = 'o' in permission - p_manage_access_control = 'p' in permission - - parsed = cls(read=p_read, create=p_create, write=p_write, delete=p_delete, add=p_add, - move=p_move, execute=p_execute, 
manage_ownership=p_manage_ownership, - manage_access_control=p_manage_access_control) + p_read = "r" in permission + p_add = "a" in permission + p_create = "c" in permission + p_write = "w" in permission + p_delete = "d" in permission + p_move = "m" in permission + p_execute = "e" in permission + p_manage_ownership = "o" in permission + p_manage_access_control = "p" in permission + + parsed = cls( + read=p_read, + create=p_create, + write=p_write, + delete=p_delete, + add=p_add, + move=p_move, + execute=p_execute, + manage_ownership=p_manage_ownership, + manage_access_control=p_manage_access_control, + ) return parsed @@ -622,12 +644,13 @@ class AccessPolicy(BlobAccessPolicy): """ def __init__( - self, permission: Optional[Union[FileSystemSasPermissions, str]] = None, + self, + permission: Optional[Union[FileSystemSasPermissions, str]] = None, expiry: Optional[Union["datetime", str]] = None, **kwargs: Any ) -> None: super(AccessPolicy, self).__init__( - permission=permission, expiry=expiry, start=kwargs.pop('start', None) # type: ignore [arg-type] + permission=permission, expiry=expiry, start=kwargs.pop("start", None) # type: ignore [arg-type] ) @@ -719,11 +742,11 @@ def __init__(self, **kwargs: Any) -> None: self.metadata = None # type: ignore [assignment] self.deleted = None self.deleted_version = None - default_encryption_scope = kwargs.get('x-ms-default-encryption-scope') + default_encryption_scope = kwargs.get("x-ms-default-encryption-scope") if default_encryption_scope: self.encryption_scope = EncryptionScopeOptions( default_encryption_scope=default_encryption_scope, - prevent_encryption_scope_override=kwargs.get('x-ms-deny-encryption-scope-override', False) + prevent_encryption_scope_override=kwargs.get("x-ms-deny-encryption-scope-override", False), ) @classmethod @@ -736,7 +759,8 @@ def _from_generated(cls, generated): props.etag = generated.properties.etag props.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access 
props.public_access = PublicAccess._from_generated( # pylint: disable=protected-access - generated.properties.public_access) + generated.properties.public_access + ) props.has_immutability_policy = generated.properties.has_immutability_policy props.has_legal_hold = generated.properties.has_legal_hold props.metadata = generated.metadata @@ -747,7 +771,8 @@ def _from_generated(cls, generated): def _convert_from_container_props(cls, container_properties): container_properties.__class__ = cls container_properties.public_access = PublicAccess._from_generated( # pylint: disable=protected-access - container_properties.public_access) + container_properties.public_access + ) container_properties.lease.__class__ = LeaseProperties return container_properties @@ -808,22 +833,22 @@ class DirectoryProperties(DictMixin): """The POSIX ACL permissions of the file or directory.""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('name') # type: ignore [assignment] - self.etag = kwargs.get('ETag') # type: ignore [assignment] + self.name = kwargs.get("name") # type: ignore [assignment] + self.etag = kwargs.get("ETag") # type: ignore [assignment] self.deleted = False - self.metadata = kwargs.get('metadata') # type: ignore [assignment] + self.metadata = kwargs.get("metadata") # type: ignore [assignment] self.lease = LeaseProperties(**kwargs) - self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] - self.creation_time = kwargs.get('x-ms-creation-time') # type: ignore [assignment] + self.last_modified = kwargs.get("Last-Modified") # type: ignore [assignment] + self.creation_time = kwargs.get("x-ms-creation-time") # type: ignore [assignment] self.deleted_time = None self.remaining_retention_days = None - self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.encryption_scope = kwargs.get("x-ms-encryption-scope") # This is being passed directly not coming from headers - self.owner = kwargs.get('owner', None) - self.group = 
kwargs.get('group', None) - self.permissions = kwargs.get('permissions', None) - self.acl = kwargs.get('acl', None) + self.owner = kwargs.get("owner", None) + self.group = kwargs.get("group", None) + self.permissions = kwargs.get("permissions", None) + self.acl = kwargs.get("acl", None) class FileProperties(DictMixin): @@ -871,26 +896,26 @@ class FileProperties(DictMixin): """The POSIX ACL permissions of the file or directory.""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('name') # type: ignore [assignment] - self.etag = kwargs.get('ETag') # type: ignore [assignment] + self.name = kwargs.get("name") # type: ignore [assignment] + self.etag = kwargs.get("ETag") # type: ignore [assignment] self.deleted = False - self.metadata = kwargs.get('metadata') # type: ignore [assignment] + self.metadata = kwargs.get("metadata") # type: ignore [assignment] self.lease = LeaseProperties(**kwargs) - self.last_modified = kwargs.get('Last-Modified') # type: ignore [assignment] - self.creation_time = kwargs.get('x-ms-creation-time') # type: ignore [assignment] - self.size = kwargs.get('Content-Length') # type: ignore [assignment] + self.last_modified = kwargs.get("Last-Modified") # type: ignore [assignment] + self.creation_time = kwargs.get("x-ms-creation-time") # type: ignore [assignment] + self.size = kwargs.get("Content-Length") # type: ignore [assignment] self.deleted_time = None self.expiry_time = kwargs.get("x-ms-expiry-time") self.remaining_retention_days = None self.content_settings = ContentSettings(**kwargs) - self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.encryption_scope = kwargs.get("x-ms-encryption-scope") # This is being passed directly not coming from headers - self.encryption_context = kwargs.get('encryption_context') - self.owner = kwargs.get('owner', None) - self.group = kwargs.get('group', None) - self.permissions = kwargs.get('permissions', None) - self.acl = kwargs.get('acl', None) + self.encryption_context = 
kwargs.get("encryption_context") + self.owner = kwargs.get("owner", None) + self.group = kwargs.get("group", None) + self.permissions = kwargs.get("permissions", None) + self.acl = kwargs.get("acl", None) class PathProperties(DictMixin): @@ -934,18 +959,18 @@ class PathProperties(DictMixin): """Specifies the encryption context to set on the file.""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.pop('name', None) # type: ignore [assignment] - self.owner = kwargs.get('owner', None) # type: ignore [assignment] - self.group = kwargs.get('group', None) # type: ignore [assignment] - self.permissions = kwargs.get('permissions', None) # type: ignore [assignment] - self.last_modified = kwargs.get('last_modified', None) # type: ignore [assignment] - self.is_directory = kwargs.get('is_directory', False) # type: ignore [assignment] - self.etag = kwargs.get('etag', None) # type: ignore [assignment] - self.content_length = kwargs.get('content_length', None) # type: ignore [assignment] - self.creation_time = kwargs.get('creation_time', None) # type: ignore [assignment] - self.expiry_time = kwargs.get('expiry_time', None) - self.encryption_scope = kwargs.get('x-ms-encryption-scope', None) - self.encryption_context = kwargs.get('x-ms-encryption-context', None) + self.name = kwargs.pop("name", None) # type: ignore [assignment] + self.owner = kwargs.get("owner", None) # type: ignore [assignment] + self.group = kwargs.get("group", None) # type: ignore [assignment] + self.permissions = kwargs.get("permissions", None) # type: ignore [assignment] + self.last_modified = kwargs.get("last_modified", None) # type: ignore [assignment] + self.is_directory = kwargs.get("is_directory", False) # type: ignore [assignment] + self.etag = kwargs.get("etag", None) # type: ignore [assignment] + self.content_length = kwargs.get("content_length", None) # type: ignore [assignment] + self.creation_time = kwargs.get("creation_time", None) # type: ignore [assignment] + self.expiry_time = 
kwargs.get("expiry_time", None) + self.encryption_scope = kwargs.get("x-ms-encryption-scope", None) + self.encryption_context = kwargs.get("x-ms-encryption-context", None) @classmethod def _from_generated(cls, generated): @@ -956,7 +981,7 @@ def _from_generated(cls, generated): path_prop.permissions = generated.permissions path_prop.last_modified = _rfc_1123_to_datetime(generated.last_modified) path_prop.is_directory = bool(generated.is_directory) - path_prop.etag = generated.additional_properties.get('etag') + path_prop.etag = generated.e_tag path_prop.content_length = generated.content_length path_prop.creation_time = _filetime_to_datetime(generated.creation_time) path_prop.expiry_time = _filetime_to_datetime(generated.expiry_time) @@ -978,9 +1003,10 @@ class ResourceTypes(BlobResourceTypes): """ def __init__( - self, service: bool = False, + self, + service: bool = False, file_system: bool = False, - object: bool = False # pylint: disable=redefined-builtin + object: bool = False, # pylint: disable=redefined-builtin ) -> None: super(ResourceTypes, self).__init__(service=service, container=file_system, object=object) @@ -1013,14 +1039,14 @@ class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): Specifies whether data in the file system may be accessed publicly and the level of access. """ - FILE = 'blob' + FILE = "blob" """ Specifies public read access for files. file data within this file system can be read via anonymous request, but file system data is not available. Clients cannot enumerate files within the container via anonymous request. """ - FILESYSTEM = 'container' + FILESYSTEM = "container" """ Specifies full public read access for file system and file data. Clients can enumerate files within the file system via anonymous request, but cannot enumerate file systems @@ -1044,8 +1070,8 @@ class LocationMode: must use PRIMARY. """ - PRIMARY = 'primary' #: Requests should be sent to the primary location. 
- SECONDARY = 'secondary' #: Requests should be sent to the secondary location, if possible. + PRIMARY = "primary" #: Requests should be sent to the primary location. + SECONDARY = "secondary" #: Requests should be sent to the secondary location, if possible. class DelimitedJsonDialect(BlobDelimitedJSON): @@ -1110,9 +1136,9 @@ class CustomerProvidedEncryptionKey(BlobCustomerProvidedEncryptionKey): class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Specifies the quick query input/output dialect.""" - DELIMITEDTEXT = 'DelimitedTextDialect' - DELIMITEDJSON = 'DelimitedJsonDialect' - PARQUET = 'ParquetDialect' + DELIMITEDTEXT = "DelimitedTextDialect" + DELIMITEDJSON = "DelimitedJsonDialect" + PARQUET = "ParquetDialect" class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -1122,7 +1148,7 @@ class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): TIMESTAMP_MS = "timestamp[ms]" STRING = "string" DOUBLE = "double" - DECIMAL = 'decimal' + DECIMAL = "decimal" class DataLakeFileQueryError: @@ -1140,10 +1166,11 @@ class DataLakeFileQueryError: """The blob offset at which the error occurred.""" def __init__( - self, error: Optional[str] = None, + self, + error: Optional[str] = None, is_fatal: bool = False, description: Optional[str] = None, - position: Optional[int] = None + position: Optional[int] = None, ) -> None: self.error = error self.is_fatal = is_fatal @@ -1214,10 +1241,11 @@ class AccessControlChanges(DictMixin): """An opaque continuation token that may be used to resume the operations in case of failures.""" def __init__( - self, batch_counters: AccessControlChangeCounters, + self, + batch_counters: AccessControlChangeCounters, aggregate_counters: AccessControlChangeCounters, batch_failures: List[AccessControlChangeFailure], - continuation: Optional[str] + continuation: Optional[str], ) -> None: self.batch_counters = batch_counters self.aggregate_counters = aggregate_counters @@ -1240,7 +1268,7 @@ class 
DeletedPathProperties(DictMixin): """The filesystem associated with the deleted path.""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('name') # type: ignore [assignment] + self.name = kwargs.get("name") # type: ignore [assignment] self.deleted_time = None self.remaining_retention_days = None self.deletion_id = None @@ -1263,11 +1291,11 @@ class AnalyticsLogging(GenLogging): policy will be disabled by default.""" def __init__(self, **kwargs: Any) -> None: - self.version = kwargs.get('version', '1.0') - self.delete = kwargs.get('delete', False) - self.read = kwargs.get('read', False) - self.write = kwargs.get('write', False) - self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + self.version = kwargs.get("version", "1.0") + self.delete = kwargs.get("delete", False) + self.read = kwargs.get("read", False) + self.write = kwargs.get("write", False) + self.retention_policy = kwargs.get("retention_policy") or RetentionPolicy() @classmethod def _from_generated(cls, generated): @@ -1278,7 +1306,9 @@ def _from_generated(cls, generated): delete=generated.delete, read=generated.read, write=generated.write, - retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + retention_policy=RetentionPolicy._from_generated( + generated.retention_policy + ), # pylint: disable=protected-access ) @@ -1306,11 +1336,11 @@ class StaticWebsite(GenStaticWebsite): """Absolute path of the default index page.""" def __init__(self, **kwargs: Any) -> None: - self.enabled = kwargs.get('enabled', False) + self.enabled = kwargs.get("enabled", False) if self.enabled: - self.index_document = kwargs.get('index_document') - self.error_document404_path = kwargs.get('error_document404_path') - self.default_index_document_path = kwargs.get('default_index_document_path') + self.index_document = kwargs.get("index_document") + self.error_document404_path = kwargs.get("error_document404_path") + 
self.default_index_document_path = kwargs.get("default_index_document_path") else: self.index_document = None self.error_document404_path = None @@ -1324,5 +1354,5 @@ def _from_generated(cls, generated): enabled=generated.enabled, index_document=generated.index_document, error_document404_path=generated.error_document404_path, - default_index_document_path=generated.default_index_document_path + default_index_document_path=generated.default_index_document_path, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index 7d74cf5e0016..90eb986ed256 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from datetime import datetime -from typing import ( - Any, Callable, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, Callable, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.exceptions import AzureError, HttpResponseError @@ -35,7 +32,7 @@ _parse_url, _rename_path_options, _set_access_control_options, - _set_access_control_recursive_options + _set_access_control_recursive_options, ) from ._shared.base_client import StorageAccountHostsMixin, parse_query from ._serialize import ( @@ -80,19 +77,23 @@ class PathClient(StorageAccountHostsMixin): authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
""" + def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, path_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> None: # remove the preceding/trailing delimiter from the path components - file_system_name = file_system_name.strip('/') + file_system_name = file_system_name.strip("/") # the name of root directory is / - if path_name != '/': - path_name = path_name.strip('/') + if path_name != "/": + path_name = path_name.strip("/") if not (file_system_name and path_name): raise ValueError("Please specify a file system name and file path.") @@ -101,14 +102,11 @@ def __init__( blob_account_url = convert_dfs_url_to_blob_url(account_url) self._blob_account_url = blob_account_url - datalake_hosts = kwargs.pop('_hosts', None) + datalake_hosts = kwargs.pop("_hosts", None) blob_hosts = None if datalake_hosts: blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY]) - blob_hosts = { - LocationMode.PRIMARY: blob_primary_account_url, - LocationMode.SECONDARY: "" - } + blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""} self._blob_client = BlobClient( account_url=blob_account_url, container_name=file_system_name, @@ -125,11 +123,7 @@ def __init__( self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) super(PathClient, self).__init__( - parsed_url, - service='dfs', - credential=self._raw_credential, - _hosts=datalake_hosts, - **kwargs + parsed_url, service="dfs", credential=self._raw_credential, _hosts=datalake_hosts, **kwargs ) # ADLS doesn't support secondary endpoint, make sure it's empty @@ -162,11 +156,7 @@ def close(self) -> None: 
def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( - url, - base_url=url, - file_system=self.file_system_name, - path=self.path_name, - pipeline=self._pipeline + url, base_url=url, file_system=self.file_system_name, path=self.path_name, pipeline=self._pipeline ) client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client @@ -175,7 +165,8 @@ def _format_url(self, hostname: str) -> str: return _format_url(self.scheme, hostname, self.file_system_name, self.path_name, self._query_str) def _create( - self, resource_type: str, + self, + resource_type: str, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -269,8 +260,8 @@ def _create( :return: A dictionary of response headers. :rtype: Dict[str, Union[str, ~datetime.datetime]] """ - lease_id = kwargs.get('lease_id', None) - lease_duration = kwargs.get('lease_duration', None) + lease_id = kwargs.get("lease_id", None) + lease_duration = kwargs.get("lease_duration", None) if lease_id and not lease_duration: raise ValueError("Please specify a lease_id and a lease_duration.") if lease_duration and not lease_id: @@ -318,19 +309,19 @@ def _delete(self, **kwargs: Any) -> Dict[str, Any]: # Perform paginated delete only if using OAuth, deleting a directory, and api version is 2023-08-03 or later # The pagination is only for ACL checks, the final request remains the atomic delete operation paginated = None - if (compare_api_versions(self.api_version, '2023-08-03') >= 0 and - hasattr(self.credential, 'get_token') and - kwargs.get('recursive')): # Directory delete will always specify recursive + if ( + compare_api_versions(self.api_version, "2023-08-03") >= 0 + and hasattr(self.credential, "get_token") + and kwargs.get("recursive") + ): # Directory delete will always specify recursive paginated = True options = _delete_path_options(paginated, 
**kwargs) try: response_headers = self._client.path.delete(**options) # Loop until continuation token is None for paginated delete - while response_headers['continuation']: - response_headers = self._client.path.delete( - continuation=response_headers['continuation'], - **options) + while response_headers["continuation"]: + response_headers = self._client.path.delete(continuation=response_headers["continuation"], **options) return response_headers except HttpResponseError as error: @@ -338,7 +329,8 @@ def _delete(self, **kwargs: Any) -> Dict[str, Any]: @distributed_trace def set_access_control( - self, owner: Optional[str] = None, + self, + owner: Optional[str] = None, group: Optional[str] = None, permissions: Optional[str] = None, acl: Optional[str] = None, @@ -504,11 +496,10 @@ def set_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessControl if not acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - max_batches = kwargs.pop('max_batches', None) - options = _set_access_control_recursive_options(mode='set', acl=acl, **kwargs) - return self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="set", acl=acl, **kwargs) + return self._set_access_control_internal(options=options, progress_hook=progress_hook, max_batches=max_batches) @distributed_trace def update_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessControlChangeResult: @@ -557,11 +548,10 @@ def update_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessCont if not acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - max_batches = kwargs.pop('max_batches', None) - options = 
_set_access_control_recursive_options(mode='modify', acl=acl, **kwargs) - return self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="modify", acl=acl, **kwargs) + return self._set_access_control_internal(options=options, progress_hook=progress_hook, max_batches=max_batches) @distributed_trace def remove_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessControlChangeResult: @@ -609,19 +599,19 @@ def remove_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessCont if not acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - max_batches = kwargs.pop('max_batches', None) - options = _set_access_control_recursive_options(mode='remove', acl=acl, **kwargs) - return self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="remove", acl=acl, **kwargs) + return self._set_access_control_internal(options=options, progress_hook=progress_hook, max_batches=max_batches) def _set_access_control_internal( - self, options: Dict[str, Any], + self, + options: Dict[str, Any], progress_hook: Optional[Callable[[AccessControlChanges], None]], - max_batches: Optional[int] = None + max_batches: Optional[int] = None, ) -> AccessControlChangeResult: try: - continue_on_failure = options.get('force_flag') + continue_on_failure = options.get("force_flag") total_directories_successful = 0 total_files_success = 0 total_failure_count = 0 @@ -637,44 +627,56 @@ def _set_access_control_internal( total_files_success += resp.files_successful total_failure_count += resp.failure_count 
batch_count += 1 - current_continuation_token = headers['continuation'] + current_continuation_token = headers["continuation"] if current_continuation_token is not None: last_continuation_token = current_continuation_token if progress_hook is not None: - progress_hook(AccessControlChanges( - batch_counters=AccessControlChangeCounters( - directories_successful=resp.directories_successful, - files_successful=resp.files_successful, - failure_count=resp.failure_count, - ), - aggregate_counters=AccessControlChangeCounters( - directories_successful=total_directories_successful, - files_successful=total_files_success, - failure_count=total_failure_count, - ), - batch_failures=[AccessControlChangeFailure( - name=failure.name, - is_directory=failure.type == 'DIRECTORY', - error_message=failure.error_message) for failure in resp.failed_entries], - continuation=last_continuation_token - )) + progress_hook( + AccessControlChanges( + batch_counters=AccessControlChangeCounters( + directories_successful=resp.directories_successful, + files_successful=resp.files_successful, + failure_count=resp.failure_count, + ), + aggregate_counters=AccessControlChangeCounters( + directories_successful=total_directories_successful, + files_successful=total_files_success, + failure_count=total_failure_count, + ), + batch_failures=[ + AccessControlChangeFailure( + name=failure.name, + is_directory=failure.type == "DIRECTORY", + error_message=failure.error_message, + ) + for failure in resp.failed_entries + ], + continuation=last_continuation_token, + ) + ) # update the continuation token, if there are more operations that cannot be completed in a single call - max_batches_satisfied = (max_batches is not None and batch_count == max_batches) + max_batches_satisfied = max_batches is not None and batch_count == max_batches continue_operation = bool(current_continuation_token) and not max_batches_satisfied - options['continuation'] = current_continuation_token + options["continuation"] = 
current_continuation_token # currently the service stops on any failure, so we should send back the last continuation token # for the user to retry the failed updates # otherwise we should just return what the service gave us - return AccessControlChangeResult(counters=AccessControlChangeCounters( - directories_successful=total_directories_successful, - files_successful=total_files_success, - failure_count=total_failure_count), - continuation=last_continuation_token - if total_failure_count > 0 and not continue_on_failure else current_continuation_token) + return AccessControlChangeResult( + counters=AccessControlChangeCounters( + directories_successful=total_directories_successful, + files_successful=total_files_success, + failure_count=total_failure_count, + ), + continuation=( + last_continuation_token + if total_failure_count > 0 and not continue_on_failure + else current_continuation_token + ), + ) except HttpResponseError as error: error.continuation_token = last_continuation_token process_storage_error(error) @@ -805,11 +807,11 @@ def _get_path_properties(self, **kwargs: Any) -> Union[DirectoryProperties, File :dedent: 8 :caption: Getting the properties for a file/directory. """ - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers path_properties = self._blob_client.get_blob_properties(**kwargs) return cast(Union[DirectoryProperties, FileProperties], path_properties) @@ -916,9 +918,7 @@ def set_http_headers(self, content_settings: Optional["ContentSettings"] = None, @distributed_trace def acquire_lease( - self, lease_duration: int = -1, - lease_id: Optional[str] = None, - **kwargs: Any + self, lease_duration: int = -1, lease_id: Optional[str] = None, **kwargs: Any ) -> DataLakeLeaseClient: """ Requests a new lease. 
If the file or directory does not have an active lease, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py index 0ef3bad262f9..b10949582b5a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py @@ -5,10 +5,7 @@ # -------------------------------------------------------------------------- import re -from typing import ( - Any, Dict, Optional, Tuple, Union, - TYPE_CHECKING -) +from typing import Any, Dict, Optional, Tuple, Union, TYPE_CHECKING from urllib.parse import quote, urlparse from ._serialize import ( @@ -19,7 +16,7 @@ get_lease_id, get_mod_conditions, get_path_http_headers, - get_source_mod_conditions + get_source_mod_conditions, ) from ._shared.response_handlers import return_headers_and_deserialized, return_response_headers @@ -33,11 +30,11 @@ def _parse_url(account_url: str) -> "ParseResult": try: - if not account_url.lower().startswith('http'): + if not account_url.lower().startswith("http"): account_url = "https://" + account_url except AttributeError as exc: raise ValueError("Account URL must be a string.") from exc - parsed_url = urlparse(account_url.rstrip('/')) + parsed_url = urlparse(account_url.rstrip("/")) if not parsed_url.netloc: raise ValueError(f"Invalid URL: {account_url}") return parsed_url @@ -45,7 +42,7 @@ def _parse_url(account_url: str) -> "ParseResult": def _format_url(scheme: str, hostname: str, file_system_name: Union[str, bytes], path_name: str, query_str: str) -> str: if isinstance(file_system_name, str): - file_system_name = file_system_name.encode('UTF-8') + file_system_name = file_system_name.encode("UTF-8") return f"{scheme}://{hostname}/{quote(file_system_name)}/{quote(path_name, safe='~/')}{query_str}" @@ -54,9 +51,9 @@ def 
_create_path_options( scheme: str, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) + access_conditions = get_access_conditions(kwargs.pop("lease", None)) mod_conditions = get_mod_conditions(kwargs) path_http_headers = None @@ -65,49 +62,49 @@ def _create_path_options( cpk_info = get_cpk_info(scheme, kwargs) - expires_on = kwargs.pop('expires_on', None) + expires_on = kwargs.pop("expires_on", None) if expires_on: try: expires_on = convert_datetime_to_rfc1123(expires_on) - kwargs['expiry_options'] = 'Absolute' + kwargs["expiry_options"] = "Absolute" except AttributeError: expires_on = str(expires_on) - kwargs['expiry_options'] = 'RelativeToNow' + kwargs["expiry_options"] = "RelativeToNow" options = { - 'resource': resource_type, - 'properties': add_metadata_headers(metadata), - 'permissions': kwargs.pop('permissions', None), - 'umask': kwargs.pop('umask', None), - 'owner': kwargs.pop('owner', None), - 'group': kwargs.pop('group', None), - 'acl': kwargs.pop('acl', None), - 'proposed_lease_id': kwargs.pop('lease_id', None), - 'lease_duration': kwargs.pop('lease_duration', None), - 'expiry_options': kwargs.pop('expiry_options', None), - 'expires_on': expires_on, - 'path_http_headers': path_http_headers, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cpk_info': cpk_info, - 'timeout': kwargs.pop('timeout', None), - 'encryption_context': kwargs.pop('encryption_context', None), - 'cls': return_response_headers + "resource": resource_type, + "properties": add_metadata_headers(metadata), + "permissions": kwargs.pop("permissions", None), + "umask": kwargs.pop("umask", None), + "owner": kwargs.pop("owner", None), + "group": kwargs.pop("group", None), + "acl": kwargs.pop("acl", None), + "proposed_lease_id": kwargs.pop("lease_id", None), + "lease_duration": 
kwargs.pop("lease_duration", None), + "expiry_options": kwargs.pop("expiry_options", None), + "expires_on": expires_on, + "path_http_headers": path_http_headers, + "lease_access_conditions": access_conditions, + "modified_access_conditions": mod_conditions, + "cpk_info": cpk_info, + "timeout": kwargs.pop("timeout", None), + "encryption_context": kwargs.pop("encryption_context", None), + "cls": return_response_headers, } options.update(kwargs) return options def _delete_path_options(paginated: Optional[bool], **kwargs) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) + access_conditions = get_access_conditions(kwargs.pop("lease", None)) mod_conditions = get_mod_conditions(kwargs) options = { - 'paginated': paginated, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'cls': return_response_headers, - 'timeout': kwargs.pop('timeout', None) + "paginated": paginated, + "lease_access_conditions": access_conditions, + "modified_access_conditions": mod_conditions, + "cls": return_response_headers, + "timeout": kwargs.pop("timeout", None), } options.update(kwargs) return options @@ -118,36 +115,36 @@ def _set_access_control_options( group: Optional[str] = None, permissions: Optional[str] = None, acl: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) + access_conditions = get_access_conditions(kwargs.pop("lease", None)) mod_conditions = get_mod_conditions(kwargs) options = { - 'owner': owner, - 'group': group, - 'permissions': permissions, - 'acl': acl, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_response_headers + "owner": owner, + "group": group, + "permissions": permissions, + "acl": acl, + "lease_access_conditions": access_conditions, + "modified_access_conditions": 
mod_conditions, + "timeout": kwargs.pop("timeout", None), + "cls": return_response_headers, } options.update(kwargs) return options def _get_access_control_options(upn: Optional[bool] = None, **kwargs: Any) -> Dict[str, Any]: - access_conditions = get_access_conditions(kwargs.pop('lease', None)) + access_conditions = get_access_conditions(kwargs.pop("lease", None)) mod_conditions = get_mod_conditions(kwargs) options = { - 'action': 'getAccessControl', - 'upn': upn if upn else False, - 'lease_access_conditions': access_conditions, - 'modified_access_conditions': mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'cls': return_response_headers + "action": "getAccessControl", + "upn": upn if upn else False, + "lease_access_conditions": access_conditions, + "modified_access_conditions": mod_conditions, + "timeout": kwargs.pop("timeout", None), + "cls": return_response_headers, } options.update(kwargs) return options @@ -155,13 +152,13 @@ def _get_access_control_options(upn: Optional[bool] = None, **kwargs: Any) -> Di def _set_access_control_recursive_options(mode: str, acl: str, **kwargs: Any) -> Dict[str, Any]: options = { - 'mode': mode, - 'force_flag': kwargs.pop('continue_on_failure', None), - 'timeout': kwargs.pop('timeout', None), - 'continuation': kwargs.pop('continuation_token', None), - 'max_records': kwargs.pop('batch_size', None), - 'acl': acl, - 'cls': return_headers_and_deserialized + "mode": mode, + "force_flag": kwargs.pop("continue_on_failure", None), + "timeout": kwargs.pop("timeout", None), + "continuation": kwargs.pop("continuation_token", None), + "max_records": kwargs.pop("batch_size", None), + "acl": acl, + "cls": return_headers_and_deserialized, } options.update(kwargs) return options @@ -171,13 +168,13 @@ def _rename_path_options( rename_source: str, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: - if metadata or 
kwargs.pop('permissions', None) or kwargs.pop('umask', None): + if metadata or kwargs.pop("permissions", None) or kwargs.pop("umask", None): raise ValueError("metadata, permissions, umask is not supported for this operation") - access_conditions = get_access_conditions(kwargs.pop('lease', None)) - source_lease_id = get_lease_id(kwargs.pop('source_lease', None)) + access_conditions = get_access_conditions(kwargs.pop("lease", None)) + source_lease_id = get_lease_id(kwargs.pop("source_lease", None)) mod_conditions = get_mod_conditions(kwargs) source_mod_conditions = get_source_mod_conditions(kwargs) @@ -186,15 +183,15 @@ def _rename_path_options( path_http_headers = get_path_http_headers(content_settings) options = { - 'rename_source': rename_source, - 'path_http_headers': path_http_headers, - 'lease_access_conditions': access_conditions, - 'source_lease_id': source_lease_id, - 'modified_access_conditions': mod_conditions, - 'source_modified_access_conditions': source_mod_conditions, - 'timeout': kwargs.pop('timeout', None), - 'mode': 'legacy', - 'cls': return_response_headers + "rename_source": rename_source, + "path_http_headers": path_http_headers, + "lease_access_conditions": access_conditions, + "source_lease_id": source_lease_id, + "modified_access_conditions": mod_conditions, + "source_modified_access_conditions": source_mod_conditions, + "timeout": kwargs.pop("timeout", None), + "mode": "legacy", + "cls": return_response_headers, } options.update(kwargs) return options @@ -204,27 +201,36 @@ def _parse_rename_path( new_name: str, file_system_name: str, query_str: str, - raw_credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential", "AsyncTokenCredential"]] # pylint: disable=line-too-long + raw_credential: Optional[ + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + "AsyncTokenCredential", + ] + ], # pylint: disable=line-too-long ) -> Tuple[str, 
str, Optional[str]]: - new_name = new_name.strip('/') - new_file_system = new_name.split('/')[0] - new_path = new_name[len(new_file_system):].strip('/') + new_name = new_name.strip("/") + new_file_system = new_name.split("/")[0] + new_path = new_name[len(new_file_system) :].strip("/") new_sas = None - sas_split = new_path.split('?') + sas_split = new_path.split("?") # If there is a ?, there could be a SAS token if len(sas_split) > 0: # Check last element for SAS by looking for sv= and sig= potential_sas = sas_split[-1] - if re.search(r'sv=\d{4}-\d{2}-\d{2}', potential_sas) and 'sig=' in potential_sas: + if re.search(r"sv=\d{4}-\d{2}-\d{2}", potential_sas) and "sig=" in potential_sas: new_sas = potential_sas # Remove SAS from new path - new_path = new_path[:-(len(new_sas) + 1)] + new_path = new_path[: -(len(new_sas) + 1)] if not new_sas: if not raw_credential and new_file_system != file_system_name: raise ValueError("please provide the sas token for the new file") if not raw_credential and new_file_system == file_system_name: - new_sas = query_str.strip('?') + new_sas = query_str.strip("?") return new_file_system, new_path, new_sas diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_quick_query_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_quick_query_helper.py index c0db2674084d..d621f70909a0 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_quick_query_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_quick_query_helper.py @@ -4,10 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- -from typing import ( - Any, Dict, IO, Iterable, Union, - TYPE_CHECKING -) +from typing import Any, Dict, IO, Iterable, Union, TYPE_CHECKING if TYPE_CHECKING: from azure.storage.blob import BlobQueryReader diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index d52a4c8ca621..44b68c38203b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -4,10 +4,7 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( - Any, cast, Dict, Literal, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Literal, Optional, Union, TYPE_CHECKING from azure.storage.blob._serialize import _get_match_headers from ._generated.models import ( @@ -29,42 +26,42 @@ EncryptionAlgorithmType = Literal["AES256"] _SUPPORTED_API_VERSIONS = [ - '2019-02-02', - '2019-07-07', - '2019-10-10', - '2019-12-12', - '2020-02-10', - '2020-04-08', - '2020-06-12', - '2020-08-04', - '2020-10-02', - '2020-12-06', - '2021-02-12', - '2021-04-10', - '2021-06-08', - '2021-08-06', - '2021-12-02', - '2022-11-02', - '2023-01-03', - '2023-05-03', - '2023-08-03', - '2023-11-03', - '2024-05-04', - '2024-08-04', - '2024-11-04', - '2025-01-05', - '2025-05-05', - '2025-07-05', - '2025-11-05', - '2026-02-06', - '2026-04-06', + "2019-02-02", + "2019-07-07", + "2019-10-10", + "2019-12-12", + "2020-02-10", + "2020-04-08", + "2020-06-12", + "2020-08-04", + "2020-10-02", + "2020-12-06", + "2021-02-12", + "2021-04-10", + "2021-06-08", + "2021-08-06", + "2021-12-02", + "2022-11-02", + "2023-01-03", + "2023-05-03", + "2023-08-03", + "2023-11-03", + "2024-05-04", + "2024-08-04", + "2024-11-04", + "2025-01-05", + 
"2025-05-05", + "2025-07-05", + "2025-11-05", + "2026-02-06", + "2026-04-06", ] # This list must be in chronological order! def get_api_version(kwargs: Dict[str, Any]) -> str: - api_version = kwargs.get('api_version', None) + api_version = kwargs.get("api_version", None) if api_version and api_version not in _SUPPORTED_API_VERSIONS: - versions = '\n'.join(_SUPPORTED_API_VERSIONS) + versions = "\n".join(_SUPPORTED_API_VERSIONS) raise ValueError(f"Unsupported API version '{api_version}'. Please select from:\n{versions}") return api_version or _SUPPORTED_API_VERSIONS[-1] @@ -80,13 +77,12 @@ def compare_api_versions(version1: str, version2: str) -> int: def convert_dfs_url_to_blob_url(dfs_account_url: str) -> str: - return dfs_account_url.replace('.dfs.', '.blob.', 1) + return dfs_account_url.replace(".dfs.", ".blob.", 1) def convert_datetime_to_rfc1123(date: "datetime") -> str: weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][date.weekday()] - month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", - "Oct", "Nov", "Dec"][date.month - 1] + month = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][date.month - 1] return f"{weekday}, {date.day:02} {month} {date.year:04} {date.hour:02}:{date.minute:02}:{date.second:02} GMT" @@ -96,33 +92,33 @@ def add_metadata_headers(metadata: Optional[Dict[str, str]] = None) -> Optional[ headers = [] if metadata: for key, value in metadata.items(): - headers.append(key + '=') + headers.append(key + "=") headers.append(encode_base64(value)) - headers.append(',') + headers.append(",") if headers: del headers[-1] - return ''.join(headers) + return "".join(headers) def get_mod_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions: - if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') + if_match, if_none_match = _get_match_headers(kwargs, "match_condition", "etag") return ModifiedAccessConditions( - 
if_modified_since=kwargs.pop('if_modified_since', None), - if_unmodified_since=kwargs.pop('if_unmodified_since', None), - if_match=if_match or kwargs.pop('if_match', None), - if_none_match=if_none_match or kwargs.pop('if_none_match', None) + if_modified_since=kwargs.pop("if_modified_since", None), + if_unmodified_since=kwargs.pop("if_unmodified_since", None), + if_match=if_match or kwargs.pop("if_match", None), + if_none_match=if_none_match or kwargs.pop("if_none_match", None), ) def get_source_mod_conditions(kwargs: Dict[str, Any]) -> SourceModifiedAccessConditions: - if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') + if_match, if_none_match = _get_match_headers(kwargs, "source_match_condition", "source_etag") return SourceModifiedAccessConditions( - source_if_modified_since=kwargs.pop('source_if_modified_since', None), - source_if_unmodified_since=kwargs.pop('source_if_unmodified_since', None), - source_if_match=if_match or kwargs.pop('source_if_match', None), - source_if_none_match=if_none_match or kwargs.pop('source_if_none_match', None) + source_if_modified_since=kwargs.pop("source_if_modified_since", None), + source_if_unmodified_since=kwargs.pop("source_if_unmodified_since", None), + source_if_match=if_match or kwargs.pop("source_if_match", None), + source_if_none_match=if_none_match or kwargs.pop("source_if_none_match", None), ) @@ -133,7 +129,7 @@ def get_path_http_headers(content_settings: "ContentSettings") -> PathHTTPHeader content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None, content_encoding=content_settings.content_encoding, content_language=content_settings.content_language, - content_disposition=content_settings.content_disposition + content_disposition=content_settings.content_disposition, ) return path_headers @@ -161,9 +157,9 @@ def get_lease_id(lease: Optional[Union["BlobLeaseClient", "BlobLeaseClientAsync" def get_lease_action_properties(kwargs: Dict[str, 
Any]) -> Dict[str, Any]: - lease_action = kwargs.pop('lease_action', None) - lease_duration = kwargs.pop('lease_duration', None) - lease = kwargs.pop('lease', None) + lease_action = kwargs.pop("lease_action", None) + lease_duration = kwargs.pop("lease_duration", None) + lease = kwargs.pop("lease", None) if hasattr(lease, "id"): lease_id = lease.id else: @@ -173,7 +169,7 @@ def get_lease_action_properties(kwargs: Dict[str, Any]) -> Dict[str, Any]: access_conditions = None # Acquiring a new lease - if lease_action in ['acquire', 'acquire-release']: + if lease_action in ["acquire", "acquire-release"]: # Use provided lease id as the new lease id proposed_lease_id = lease_id # Assign a default lease duration if not provided @@ -183,22 +179,22 @@ def get_lease_action_properties(kwargs: Dict[str, Any]) -> Dict[str, Any]: access_conditions = LeaseAccessConditions(lease_id=lease_id) if lease_id else None return { - 'lease_action': lease_action, - 'lease_duration': lease_duration, - 'proposed_lease_id': proposed_lease_id, - 'lease_access_conditions': access_conditions + "lease_action": lease_action, + "lease_duration": lease_duration, + "proposed_lease_id": proposed_lease_id, + "lease_access_conditions": access_conditions, } def get_cpk_info(scheme: str, kwargs: Dict[str, Any]) -> Optional[CpkInfo]: - cpk: Optional[CustomerProvidedEncryptionKey] = kwargs.pop('cpk', None) + cpk: Optional[CustomerProvidedEncryptionKey] = kwargs.pop("cpk", None) if cpk: - if scheme.lower() != 'https': + if scheme.lower() != "https": raise ValueError("Customer provided encryption key must be used over HTTPS.") return CpkInfo( encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, - encryption_algorithm=cast(EncryptionAlgorithmType, cpk.algorithm) + encryption_algorithm=cast(EncryptionAlgorithmType, cpk.algorithm), ) return None diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py index 5441488d86a9..3f7609b9f026 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py @@ -145,7 +145,7 @@ def url(self) -> str: :return: The full endpoint URL to this entity, including SAS token if used. :rtype: str """ - return self._format_url(self._hosts[self._location_mode]) # type: ignore + return self._format_url(self._hosts[self._location_mode]) # type: ignore @property def primary_endpoint(self) -> str: @@ -178,7 +178,7 @@ def secondary_endpoint(self) -> str: """ if not self._hosts[LocationMode.SECONDARY]: raise ValueError("No secondary host configured.") - return self._format_url(self._hosts[LocationMode.SECONDARY]) # type: ignore + return self._format_url(self._hosts[LocationMode.SECONDARY]) # type: ignore @property def secondary_hostname(self) -> Optional[str]: @@ -416,7 +416,7 @@ def parse_connection_str( if any(len(tup) != 2 for tup in conn_settings_list): raise ValueError("Connection string is either blank or malformed.") conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) - if conn_settings.get('USEDEVELOPMENTSTORAGE') == 'true': + if conn_settings.get("USEDEVELOPMENTSTORAGE") == "true": return _get_development_storage_endpoint(service), None, DEVSTORE_ACCOUNT_KEY endpoints = _SERVICE_PARAMS[service] primary = None diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py index 16aba3116029..400f7d6f6dff 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client_async.py @@ -210,7 +210,7 @@ def 
parse_connection_str( if any(len(tup) != 2 for tup in conn_settings_list): raise ValueError("Connection string is either blank or malformed.") conn_settings = dict((key.upper(), val) for key, val in conn_settings_list) - if conn_settings.get('USEDEVELOPMENTSTORAGE') == 'true': + if conn_settings.get("USEDEVELOPMENTSTORAGE") == "true": return _get_development_storage_endpoint(service), None, DEVSTORE_ACCOUNT_KEY endpoints = _SERVICE_PARAMS[service] primary = None diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py index 3f65ae8d6498..abb8c8ff9e7d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/policies.py @@ -438,7 +438,7 @@ def _set_next_host_location(self, settings: Dict[str, Any], request: "PipelineRe def configure_retries(self, request: "PipelineRequest") -> Dict[str, Any]: """ Configure the retry settings for the request. - + :param request: A pipeline request object. :type request: ~azure.core.pipeline.PipelineRequest :return: A dictionary containing the retry settings. @@ -478,7 +478,7 @@ def get_backoff_time(self, settings: Dict[str, Any]) -> float: # pylint: disabl def sleep(self, settings, transport): """Sleep for the backoff time. - + :param Dict[str, Any] settings: The configurable values pertaining to the sleep operation. :param transport: The transport to use for sleeping. :type transport: @@ -552,7 +552,7 @@ def increment( def send(self, request): """Send the request with retry logic. - + :param request: A pipeline request object. :type request: ~azure.core.pipeline.PipelineRequest :return: A pipeline response object. 
@@ -713,11 +713,11 @@ def __init__(self, credential: "TokenCredential", audience: str, **kwargs: Any) def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: """Handle the challenge from the service and authorize the request. - + :param request: The request object. :type request: ~azure.core.pipeline.PipelineRequest :param response: The response object. - :type response: ~azure.core.pipeline.PipelineResponse + :type response: ~azure.core.pipeline.PipelineResponse :return: True if the request was authorized, False otherwise. :rtype: bool """ diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py index 9a079c56404f..cc3198a07398 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/response_handlers.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -176,8 +177,11 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p error_message += f"\n{name}:{info}" if additional_data.get("headername") == "x-ms-version" and error_code == StorageErrorCode.INVALID_HEADER_VALUE: - error_message = ("The provided service version is not enabled on this storage account." + - f"Please see {SV_DOCS_URL} for additional information.\n" + error_message) + error_message = ( + "The provided service version is not enabled on this storage account." 
+ + f"Please see {SV_DOCS_URL} for additional information.\n" + + error_message + ) # No need to create an instance if it has already been serialized by the generated layer if serialized: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py index 506721dd69ab..c97cb5bf7035 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py @@ -5,10 +5,7 @@ # -------------------------------------------------------------------------- # pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( - Any, Callable, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, Callable, cast, Dict, Optional, Union, TYPE_CHECKING from urllib.parse import parse_qs from azure.storage.blob import generate_account_sas as generate_blob_account_sas @@ -27,7 +24,7 @@ FileSasPermissions, FileSystemSasPermissions, ResourceTypes, - UserDelegationKey + UserDelegationKey, ) @@ -481,7 +478,7 @@ def generate_file_sas( :rtype: str """ if directory_name: - path = directory_name.rstrip('/') + "/" + file_name + path = directory_name.rstrip("/") + "/" + file_name else: path = file_name return generate_blob_sas( @@ -499,6 +496,7 @@ def generate_file_sas( **kwargs ) + def _is_credential_sastoken(credential: Any) -> bool: if not credential or not isinstance(credential, str): return False diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py index d9215a46cc5d..f45f9c68f36d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py @@ -4,20 +4,13 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( - Any, cast, Dict, IO, Optional, - TYPE_CHECKING -) +from typing import Any, cast, Dict, IO, Optional, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from ._deserialize import process_storage_error from ._shared.response_handlers import return_response_headers -from ._shared.uploads import ( - DataLakeFileChunkUploader, - upload_data_chunks, - upload_substream_blocks -) +from ._shared.uploads import DataLakeFileChunkUploader, upload_data_chunks, upload_substream_blocks if TYPE_CHECKING: from ._generated.operations import PathOperations @@ -25,12 +18,14 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument - return any([ - modified_access_conditions.if_modified_since, - modified_access_conditions.if_unmodified_since, - modified_access_conditions.if_none_match, - modified_access_conditions.if_match - ]) + return any( + [ + modified_access_conditions.if_modified_since, + modified_access_conditions.if_unmodified_since, + modified_access_conditions.if_none_match, + modified_access_conditions.if_match, + ] + ) def upload_datalake_file( @@ -46,46 +41,54 @@ def upload_datalake_file( try: if length == 0: return {} - properties = kwargs.pop('properties', None) - umask = kwargs.pop('umask', None) - permissions = kwargs.pop('permissions', None) - path_http_headers = kwargs.pop('path_http_headers', None) - modified_access_conditions = kwargs.pop('modified_access_conditions', None) - chunk_size = kwargs.pop('chunk_size', 100 * 1024 * 1024) - encryption_context = kwargs.pop('encryption_context', None) - progress_hook = kwargs.pop('progress_hook', None) + properties = kwargs.pop("properties", None) + umask = kwargs.pop("umask", None) + permissions = kwargs.pop("permissions", None) + 
path_http_headers = kwargs.pop("path_http_headers", None) + modified_access_conditions = kwargs.pop("modified_access_conditions", None) + chunk_size = kwargs.pop("chunk_size", 100 * 1024 * 1024) + encryption_context = kwargs.pop("encryption_context", None) + progress_hook = kwargs.pop("progress_hook", None) if not overwrite: # if customers didn't specify access conditions, they cannot flush data to existing file if not _any_conditions(modified_access_conditions): - modified_access_conditions.if_none_match = '*' + modified_access_conditions.if_none_match = "*" if properties or umask or permissions: raise ValueError("metadata, umask and permissions can be set only when overwrite is enabled") if overwrite: - response = cast(Dict[str, Any], client.create( - resource='file', - path_http_headers=path_http_headers, - properties=properties, - modified_access_conditions=modified_access_conditions, - umask=umask, - permissions=permissions, - encryption_context=encryption_context, - cls=return_response_headers, - **kwargs - )) + response = cast( + Dict[str, Any], + client.create( + resource="file", + path_http_headers=path_http_headers, + properties=properties, + modified_access_conditions=modified_access_conditions, + umask=umask, + permissions=permissions, + encryption_context=encryption_context, + cls=return_response_headers, + **kwargs + ), + ) # this modified_access_conditions will be applied to flush_data to make sure # no other flush between create and the current flush - modified_access_conditions.if_match = response['etag'] + modified_access_conditions.if_match = response["etag"] modified_access_conditions.if_none_match = None modified_access_conditions.if_modified_since = None modified_access_conditions.if_unmodified_since = None - use_original_upload_path = file_settings.use_byte_buffer or \ - validate_content or chunk_size < file_settings.min_large_chunk_upload_threshold or \ - hasattr(stream, 'seekable') and not stream.seekable() or \ - not hasattr(stream, 
'seek') or not hasattr(stream, 'tell') + use_original_upload_path = ( + file_settings.use_byte_buffer + or validate_content + or chunk_size < file_settings.min_large_chunk_upload_threshold + or hasattr(stream, "seekable") + and not stream.seekable() + or not hasattr(stream, "seek") + or not hasattr(stream, "tell") + ) if use_original_upload_path: upload_data_chunks( @@ -112,13 +115,16 @@ def upload_datalake_file( **kwargs ) - return cast(Dict[str, Any], client.flush_data( - position=length, - path_http_headers=path_http_headers, - modified_access_conditions=modified_access_conditions, - close=True, - cls=return_response_headers, - **kwargs - )) + return cast( + Dict[str, Any], + client.flush_data( + position=length, + path_http_headers=path_http_headers, + modified_access_conditions=modified_access_conditions, + close=True, + cls=return_response_headers, + **kwargs + ), + ) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/__init__.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/__init__.py index c24dde8d3478..13741264bcd5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/__init__.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/__init__.py @@ -13,12 +13,12 @@ from ._data_lake_lease_async import DataLakeLeaseClient __all__ = [ - 'DataLakeServiceClient', - 'FileSystemClient', - 'DataLakeDirectoryClient', - 'DataLakeFileClient', - 'DataLakeLeaseClient', - 'ExponentialRetry', - 'LinearRetry', - 'StorageStreamDownloader' + "DataLakeServiceClient", + "FileSystemClient", + "DataLakeDirectoryClient", + "DataLakeFileClient", + "DataLakeLeaseClient", + "ExponentialRetry", + "LinearRetry", + "StorageStreamDownloader", ] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index c0cd414369b5..c56fa9dd37f7 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only import functools -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self try: @@ -89,22 +86,29 @@ class DataLakeDirectoryClient(PathClient): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, directory_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: - super(DataLakeDirectoryClient, self).__init__(account_url, file_system_name, path_name=directory_name, - credential=credential, **kwargs) + super(DataLakeDirectoryClient, self).__init__( + account_url, file_system_name, path_name=directory_name, credential=credential, **kwargs + ) @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, directory_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: 
disable=line-too-long + **kwargs: Any, ) -> Self: """ Create DataLakeDirectoryClient from a Connection String. @@ -140,15 +144,18 @@ def from_connection_string( :return: A DataLakeDirectoryClient. :rtype: ~azure.storage.filedatalake.aio.DataLakeDirectoryClient """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls( - account_url, file_system_name=file_system_name, directory_name=directory_name, - credential=credential, **kwargs) + account_url, + file_system_name=file_system_name, + directory_name=directory_name, + credential=credential, + **kwargs, + ) @distributed_trace_async async def create_directory( - self, metadata: Optional[Dict[str, str]] = None, - **kwargs + self, metadata: Optional[Dict[str, str]] = None, **kwargs ) -> Dict[str, Union[str, "datetime"]]: """ Create a new directory. @@ -232,7 +239,7 @@ async def create_directory( :dedent: 8 :caption: Create directory. """ - return await self._create('directory', metadata=metadata, **kwargs) + return await self._create("directory", metadata=metadata, **kwargs) @distributed_trace_async async def exists(self, **kwargs: Any) -> bool: @@ -353,11 +360,11 @@ async def get_directory_properties(self, **kwargs: Any) -> DirectoryProperties: :dedent: 4 :caption: Getting the properties for a file/directory. """ - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers props = await self._get_path_properties(cls=deserialize_dir_properties, **kwargs) return cast(DirectoryProperties, props) @@ -431,21 +438,26 @@ async def rename_directory(self, new_name: str, **kwargs: Any) -> "DataLakeDirec :caption: Rename the source directory. 
""" new_file_system, new_path, new_dir_sas = _parse_rename_path( - new_name, self.file_system_name, self._query_str, self._raw_credential) + new_name, self.file_system_name, self._query_str, self._raw_credential + ) new_directory_client = DataLakeDirectoryClient( - f"{self.scheme}://{self.primary_hostname}", new_file_system, directory_name=new_path, + f"{self.scheme}://{self.primary_hostname}", + new_file_system, + directory_name=new_path, credential=self._raw_credential or new_dir_sas, - _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=self._pipeline, + ) await new_directory_client._rename_path( # pylint: disable=protected-access - f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs) + f"/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}", **kwargs + ) return new_directory_client @distributed_trace_async async def create_sub_directory( - self, sub_directory: Union[DirectoryProperties, str], - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + self, sub_directory: Union[DirectoryProperties, str], metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> "DataLakeDirectoryClient": """ Create a subdirectory and return the subdirectory client to be interacted with. @@ -530,8 +542,7 @@ async def create_sub_directory( @distributed_trace_async async def delete_sub_directory( - self, sub_directory: Union[DirectoryProperties, str], - **kwargs: Any + self, sub_directory: Union[DirectoryProperties, str], **kwargs: Any ) -> "DataLakeDirectoryClient": """ Marks the specified subdirectory for deletion. 
@@ -667,13 +678,14 @@ async def create_file(self, file: Union[FileProperties, str], **kwargs: Any) -> @distributed_trace def get_paths( - self, *, + self, + *, recursive: bool = True, max_results: Optional[int] = None, upn: Optional[bool] = None, start_from: Optional[str] = None, timeout: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> AsyncItemPaged["PathProperties"]: """Returns an async generator to list the paths under specified file system and directory. The generator will lazily follow the continuation tokens returned by the service. @@ -708,15 +720,16 @@ def get_paths( url = f"{self.scheme}://{hostname}/{quote(self.file_system_name)}" client = self._build_generated_client(url) command = functools.partial( - client.file_system.list_paths, - path=self.path_name, - begin_from=start_from, - timeout=timeout, - **kwargs + client.file_system.list_paths, path=self.path_name, begin_from=start_from, timeout=timeout, **kwargs ) return AsyncItemPaged( - command, recursive, path=self.path_name, max_results=max_results, - upn=upn, page_iterator_class=PathPropertiesPaged, **kwargs + command, + recursive, + path=self.path_name, + max_results=max_results, + upn=upn, + page_iterator_class=PathPropertiesPaged, + **kwargs, ) def get_file_client(self, file: Union[FileProperties, str]) -> DataLakeFileClient: @@ -732,18 +745,24 @@ def get_file_client(self, file: Union[FileProperties, str]) -> DataLakeFileClien :rtype: ~azure.storage.filedatalake.aio.DataLakeFileClient """ if isinstance(file, FileProperties): - file_path = file.get('name') + file_path = file.get("name") else: - file_path = self.path_name + '/' + str(file) + file_path = self.path_name + "/" + str(file) _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: 
disable=protected-access ) return DataLakeFileClient( - self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + self.file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) def get_sub_directory_client(self, sub_directory: Union[DirectoryProperties, str]) -> "DataLakeDirectoryClient": """Get a client to interact with the specified subdirectory of the current directory. @@ -758,15 +777,21 @@ def get_sub_directory_client(self, sub_directory: Union[DirectoryProperties, str :rtype: ~azure.storage.filedatalake.aio.DataLakeDirectoryClient """ if isinstance(sub_directory, DirectoryProperties): - subdir_path = sub_directory.get('name') + subdir_path = sub_directory.get("name") else: - subdir_path = self.path_name + '/' + str(sub_directory) + subdir_path = self.path_name + "/" + str(sub_directory) _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access ) return DataLakeDirectoryClient( - self.url, self.file_system_name, directory_name=subdir_path, credential=self._raw_credential, + self.url, + self.file_system_name, + directory_name=subdir_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) diff --git 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index 1edf3832a690..1d8b3216e33b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from datetime import datetime -from typing import ( - Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, Optional, Union, - TYPE_CHECKING -) +from typing import Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, Optional, Union, TYPE_CHECKING from urllib.parse import quote, unquote from typing_extensions import Self @@ -87,22 +84,29 @@ class DataLakeFileClient(PathClient): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, file_path: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: - super(DataLakeFileClient, self).__init__(account_url, file_system_name, path_name=file_path, - credential=credential, **kwargs) + super(DataLakeFileClient, self).__init__( + account_url, file_system_name, path_name=file_path, credential=credential, **kwargs + ) @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, file_path: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: 
disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> Self: """ Create DataLakeFileClient from a Connection String. @@ -137,16 +141,15 @@ def from_connection_string( :returns: A DataLakeFileClient. :rtype: ~azure.storage.filedatalake.aio.DataLakeFileClient """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') - return cls( - account_url, file_system_name=file_system_name, file_path=file_path, - credential=credential, **kwargs) + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") + return cls(account_url, file_system_name=file_system_name, file_path=file_path, credential=credential, **kwargs) @distributed_trace_async async def create_file( - self, content_settings: Optional["ContentSettings"] = None, + self, + content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Union[str, datetime]]: """ Create a new file. @@ -240,7 +243,7 @@ async def create_file( :dedent: 4 :caption: Create file. """ - return await self._create('file', content_settings=content_settings, metadata=metadata, **kwargs) + return await self._create("file", content_settings=content_settings, metadata=metadata, **kwargs) @distributed_trace_async async def exists(self, **kwargs: Any) -> bool: @@ -359,19 +362,17 @@ async def get_file_properties(self, **kwargs: Any) -> FileProperties: :dedent: 4 :caption: Getting the properties for a file. 
""" - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers props = await self._get_path_properties(cls=deserialize_file_properties, **kwargs) return cast(FileProperties, props) @distributed_trace_async async def set_file_expiry( - self, expiry_options: str, - expires_on: Optional[Union[datetime, int]] = None, - **kwargs: Any + self, expiry_options: str, expires_on: Optional[Union[datetime, int]] = None, **kwargs: Any ) -> None: """Sets the time a file will expire and be deleted. @@ -398,10 +399,11 @@ async def set_file_expiry( @distributed_trace_async async def upload_data( - self, data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], + self, + data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[bytes]], length: Optional[int] = None, overwrite: Optional[bool] = False, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: """ Upload data to a file. @@ -485,29 +487,24 @@ async def upload_data( :rtype: Dict[str, Any] """ options = _upload_options( - data, - self.scheme, - self._config, - self._client.path, - length=length, - overwrite=overwrite, - **kwargs + data, self.scheme, self._config, self._client.path, length=length, overwrite=overwrite, **kwargs ) return await upload_datalake_file(**options) @distributed_trace_async async def append_data( - self, data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]], + self, + data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]], offset: int, length: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Any]: """Append data to the file. :param data: Content to be appended to file :type data: Union[bytes, Iterable[bytes], AsyncIterable[bytes], IO[bytes]] :param int offset: start position of the data to be appended to. 
- :param length: + :param length: Size of the data to append. Optional if the length of data can be determined. For Iterable and IO, if the length is not provided and cannot be determined, all data will be read into memory. :type length: int or None @@ -563,12 +560,7 @@ async def append_data( :dedent: 4 :caption: Append data to the file. """ - options = _append_data_options( - data=data, - offset=offset, - scheme=self.scheme, - length=length, - **kwargs) + options = _append_data_options(data=data, offset=offset, scheme=self.scheme, length=length, **kwargs) try: return await self._client.path.append_data(**options) except HttpResponseError as error: @@ -576,9 +568,7 @@ async def append_data( @distributed_trace_async async def flush_data( - self, offset: int, - retain_uncommitted_data: Optional[bool] = False, - **kwargs: Any + self, offset: int, retain_uncommitted_data: Optional[bool] = False, **kwargs: Any ) -> Dict[str, Any]: """Commit the previous appended data. @@ -667,12 +657,7 @@ async def flush_data( :dedent: 12 :caption: Commit the previous appended data. """ - options = _flush_data_options( - offset, - self.scheme, - retain_uncommitted_data=retain_uncommitted_data, - **kwargs - ) + options = _flush_data_options(offset, self.scheme, retain_uncommitted_data=retain_uncommitted_data, **kwargs) try: return await self._client.path.flush_data(**options) except HttpResponseError as error: @@ -680,9 +665,7 @@ async def flush_data( @distributed_trace_async async def download_file( - self, offset: Optional[int] = None, - length: Optional[int] = None, - **kwargs: Any + self, offset: Optional[int] = None, length: Optional[int] = None, **kwargs: Any ) -> StorageStreamDownloader: """Downloads a file to the StorageStreamDownloader. 
The readall() method must be used to read all the content, or readinto() must be used to download the file into @@ -821,13 +804,20 @@ async def rename_file(self, new_name: str, **kwargs: Any) -> "DataLakeFileClient :caption: Rename the source file. """ new_file_system, new_path, new_file_sas = _parse_rename_path( - new_name, self.file_system_name, self._query_str, self._raw_credential) + new_name, self.file_system_name, self._query_str, self._raw_credential + ) new_file_client = DataLakeFileClient( - f"{self.scheme}://{self.primary_hostname}", new_file_system, file_path=new_path, + f"{self.scheme}://{self.primary_hostname}", + new_file_system, + file_path=new_path, credential=self._raw_credential or new_file_sas, - _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, - _location_mode=self._location_mode) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=self._pipeline, + _location_mode=self._location_mode, + ) await new_file_client._rename_path( # pylint: disable=protected-access - f'/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}', **kwargs) + f"/{quote(unquote(self.file_system_name))}/{quote(unquote(self.path_name))}{self._query_str}", **kwargs + ) return new_file_client diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.py index 5010f5290625..d6b3d98b1ebf 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for @@ -6,10 +7,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only import uuid -from typing import ( - Union, Optional, Any, - TYPE_CHECKING -) +from typing import Union, Optional, Any, TYPE_CHECKING from typing_extensions import Self from azure.core.tracing.decorator_async import distributed_trace_async @@ -49,16 +47,17 @@ class DataLakeLeaseClient: # pylint: disable=client-accepts-api-version-keyword This will be `None` if no lease has yet been acquired or modified.""" def __init__( # pylint: disable=missing-client-constructor-parameter-credential, missing-client-constructor-parameter-kwargs - self, client: Union["FileSystemClient", "DataLakeDirectoryClient", "DataLakeFileClient"], - lease_id: Optional[str] = None + self, + client: Union["FileSystemClient", "DataLakeDirectoryClient", "DataLakeFileClient"], + lease_id: Optional[str] = None, ) -> None: self.id = lease_id or str(uuid.uuid4()) self.last_modified = None self.etag = None - if hasattr(client, '_blob_client'): + if hasattr(client, "_blob_client"): _client = client._blob_client - elif hasattr(client, '_container_client'): + elif hasattr(client, "_container_client"): _client = client._container_client else: raise TypeError("Lease must use any of FileSystemClient, DataLakeDirectoryClient, or DataLakeFileClient.") diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.pyi index cfb42dcae656..8acac55a0257 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_lease_async.pyi @@ -7,7 +7,9 @@ from datetime import datetime from typing import ( - Any, Optional, Union, + Any, + Optional, + Union, ) from types import TracebackType from typing_extensions import Self @@ 
-18,14 +20,14 @@ from ._file_system_client_async import FileSystemClient from ._data_lake_directory_client_async import DataLakeDirectoryClient from ._data_lake_file_client_async import DataLakeFileClient - class DataLakeLeaseClient: id: str etag: Optional[str] last_modified: Optional[datetime] def __init__( - self, client: Union[FileSystemClient, DataLakeDirectoryClient, DataLakeFileClient], - lease_id: Optional[str] = None + self, + client: Union[FileSystemClient, DataLakeDirectoryClient, DataLakeFileClient], + lease_id: Optional[str] = None, ) -> None: ... async def __aenter__(self) -> Self: ... async def __aexit__( diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py index 5ff3b28659a3..53eec01bced4 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py @@ -5,10 +5,7 @@ # -------------------------------------------------------------------------- # pylint: disable=docstring-keyword-should-match-keyword-only -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.async_paging import AsyncItemPaged @@ -19,13 +16,7 @@ from .._data_lake_service_client_helpers import _format_url, _parse_url from .._deserialize import get_datalake_service_properties from .._generated.aio import AzureDataLakeStorageRESTAPI -from .._models import ( - DirectoryProperties, - FileProperties, - FileSystemProperties, - LocationMode, - UserDelegationKey -) +from .._models import DirectoryProperties, FileProperties, FileSystemProperties, LocationMode, UserDelegationKey from .._serialize import 
convert_dfs_url_to_blob_url, get_api_version from .._shared.base_client import parse_query, StorageAccountHostsMixin from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str @@ -100,11 +91,14 @@ class DataLakeServiceClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMi """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + self, + account_url: str, + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) + kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) parsed_url = _parse_url(account_url=account_url) blob_account_url = convert_dfs_url_to_blob_url(account_url) @@ -116,14 +110,15 @@ def __init__( _, sas_token = parse_query(parsed_url.query) self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) - super(DataLakeServiceClient, self).__init__(parsed_url, service='dfs', - credential=self._raw_credential, **kwargs) + super(DataLakeServiceClient, self).__init__( + parsed_url, service="dfs", credential=self._raw_credential, **kwargs + ) # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, pipeline=self._pipeline) self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] - self._loop = kwargs.get('loop', None) + self._loop = kwargs.get("loop", None) async def __aenter__(self) -> Self: await self._client.__aenter__() @@ -135,7 +130,7 @@ async def __aexit__(self, *args: Any) -> None: await 
self._client.__aexit__(*args) async def close(self) -> None: # type: ignore - """ This method is to close the sockets opened by the client. + """This method is to close the sockets opened by the client. It need not be used when using with a context manager. :return: None @@ -155,8 +150,11 @@ def _format_url(self, hostname: str) -> str: @classmethod def from_connection_string( - cls, conn_str: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + cls, + conn_str: str, + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> Self: """ @@ -194,12 +192,13 @@ def from_connection_string( :dedent: 8 :caption: Creating the DataLakeServiceClient from a connection string. """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls(account_url, credential=credential, **kwargs) @distributed_trace_async async def get_user_delegation_key( - self, key_start_time: "datetime", + self, + key_start_time: "datetime", key_expiry_time: "datetime", *, delegated_user_tid: Optional[str] = None, @@ -242,9 +241,7 @@ async def get_user_delegation_key( @distributed_trace def list_file_systems( - self, name_starts_with: Optional[str] = None, - include_metadata: bool = False, - **kwargs: Any + self, name_starts_with: Optional[str] = None, include_metadata: bool = False, **kwargs: Any ) -> AsyncItemPaged[FileSystemProperties]: """Returns a generator to list the file systems under the specified account. @@ -285,17 +282,19 @@ def list_file_systems( :dedent: 8 :caption: Listing the file systems in the datalake service. 
""" - item_paged = cast(AsyncItemPaged[FileSystemProperties], self._blob_service_client.list_containers( - name_starts_with=name_starts_with, - include_metadata=include_metadata, - **kwargs - )) + item_paged = cast( + AsyncItemPaged[FileSystemProperties], + self._blob_service_client.list_containers( + name_starts_with=name_starts_with, include_metadata=include_metadata, **kwargs + ), + ) item_paged._page_iterator_class = FileSystemPropertiesPaged # pylint: disable=protected-access return item_paged @distributed_trace_async async def create_file_system( - self, file_system: Union[FileSystemProperties, str], + self, + file_system: Union[FileSystemProperties, str], metadata: Optional[Dict[str, str]] = None, public_access: Optional["PublicAccess"] = None, **kwargs: Any @@ -366,7 +365,7 @@ async def _rename_file_system(self, name: str, new_name: str, **kwargs: Any) -> :returns: FileSystemClient with the newly specified name. :rtype: ~azure.storage.filedatalake.FileSystemClient """ - await self._blob_service_client._rename_container(name, new_name, **kwargs) # pylint: disable=protected-access + await self._blob_service_client._rename_container(name, new_name, **kwargs) # pylint: disable=protected-access renamed_file_system = self.get_file_system_client(new_name) return renamed_file_system @@ -393,15 +392,14 @@ async def undelete_file_system(self, name: str, deleted_version: str, **kwargs: :returns: The FileSystemClient of the restored soft-deleted filesystem. 
:rtype: ~azure.storage.filedatalake.FileSystemClient """ - new_name = kwargs.pop('new_name', None) + new_name = kwargs.pop("new_name", None) await self._blob_service_client.undelete_container(name, deleted_version, new_name=new_name, **kwargs) file_system = self.get_file_system_client(new_name or name) return file_system @distributed_trace_async async def delete_file_system( - self, file_system: Union[FileSystemProperties, str], - **kwargs: Any + self, file_system: Union[FileSystemProperties, str], **kwargs: Any ) -> FileSystemClient: """Marks the specified file system for deletion. @@ -484,16 +482,20 @@ def get_file_system_client(self, file_system: Union[FileSystemProperties, str]) _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access + ) + return FileSystemClient( + self.url, + file_system_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, ) - return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, - _pipeline=_pipeline, _hosts=self._hosts) def get_directory_client( - self, file_system: Union[FileSystemProperties,str], - directory: Union[DirectoryProperties, str] + self, file_system: Union[FileSystemProperties, str], directory: Union[DirectoryProperties, str] ) -> DataLakeDirectoryClient: """Get a client to interact with the specified directory. 
@@ -530,17 +532,21 @@ def get_directory_client( _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access + ) + return DataLakeDirectoryClient( + self.url, + file_system_name, + directory_name=directory_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, ) - return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, - credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, _pipeline=_pipeline, - _hosts=self._hosts) def get_file_client( - self, file_system: Union[FileSystemProperties, str], - file_path: Union[FileProperties, str] + self, file_system: Union[FileSystemProperties, str], file_path: Union[FileProperties, str] ) -> DataLakeFileClient: """Get a client to interact with the specified file. 
@@ -577,12 +583,18 @@ def get_file_client( _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access ) return DataLakeFileClient( - self.url, file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + ) @distributed_trace_async async def set_service_properties(self, **kwargs: Any) -> None: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py index e85314cb06f8..d5ba877bfec4 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_download_async.py @@ -3,10 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -from typing import ( - Any, AsyncIterator, cast, IO, - TYPE_CHECKING -) +from typing import Any, AsyncIterator, cast, IO, TYPE_CHECKING from .._deserialize import from_blob_properties @@ -31,13 +28,12 @@ def __init__(self, downloader: Any) -> None: self.name = self._downloader.name # Parse additional Datalake-only properties - encryption_context = self._downloader._response.response.headers.get('x-ms-encryption-context') - acl = self._downloader._response.response.headers.get('x-ms-acl') + encryption_context = self._downloader._response.response.headers.get("x-ms-encryption-context") + acl = self._downloader._response.response.headers.get("x-ms-acl") self.properties = from_blob_properties( - self._downloader.properties, - encryption_context=encryption_context, - acl=acl) + self._downloader.properties, encryption_context=encryption_context, acl=acl + ) self.size = self._downloader.size def __len__(self) -> int: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py index 735a6d410132..843ec65a78db 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for @@ -6,10 +7,7 @@ # pylint: disable=too-many-lines, docstring-keyword-should-match-keyword-only import functools -from typing import ( - Any, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.async_paging import AsyncItemPaged @@ -27,7 +25,7 @@ FileProperties, FileSystemProperties, LocationMode, - PublicAccess + PublicAccess, ) from .._serialize import convert_dfs_url_to_blob_url, get_api_version from .._shared.base_client import parse_query, StorageAccountHostsMixin @@ -97,12 +95,15 @@ class FileSystemClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): """The hostname of the primary endpoint.""" def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) + kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) if not file_system_name: raise ValueError("Please specify a file system name.") @@ -113,24 +114,21 @@ def __init__( # TODO: add self.account_url to base_client and remove _blob_account_url self._blob_account_url = blob_account_url - datalake_hosts = kwargs.pop('_hosts', None) + datalake_hosts = kwargs.pop("_hosts", None) blob_hosts = None if datalake_hosts: blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY]) blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""} self._container_client = ContainerClient( - 
self._blob_account_url, - self.file_system_name, - credential=credential, - _hosts=blob_hosts, - **kwargs + self._blob_account_url, self.file_system_name, credential=credential, _hosts=blob_hosts, **kwargs ) _, sas_token = parse_query(parsed_url.query) self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) - super(FileSystemClient, self).__init__(parsed_url, service='dfs', credential=self._raw_credential, - _hosts=datalake_hosts, **kwargs) + super(FileSystemClient, self).__init__( + parsed_url, service="dfs", credential=self._raw_credential, _hosts=datalake_hosts, **kwargs + ) # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" @@ -138,7 +136,7 @@ def __init__( self._api_version = get_api_version(kwargs) self._client = self._build_generated_client(self.url) self._datalake_client_for_blob_operation = self._build_generated_client(self._container_client.url) - self._loop = kwargs.get('loop', None) + self._loop = kwargs.get("loop", None) async def __aenter__(self) -> Self: await self._client.__aenter__() @@ -164,10 +162,7 @@ async def close(self) -> None: # type: ignore def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( - url, - base_url=url, - file_system=self.file_system_name, - pipeline=self._pipeline + url, base_url=url, file_system=self.file_system_name, pipeline=self._pipeline ) client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client @@ -177,10 +172,13 @@ def _format_url(self, hostname: str) -> str: @classmethod def from_connection_string( - cls, conn_str: str, + cls, + conn_str: str, file_system_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long - **kwargs: Any + credential: Optional[ + Union[str, Dict[str, str], 
"AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long + **kwargs: Any, ) -> Self: """ Create FileSystemClient from a Connection String. @@ -209,14 +207,12 @@ def from_connection_string( :returns: A FileSystemClient. :rtype: ~azure.storage.filedatalake.FileSystemClient """ - account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') + account_url, _, credential = parse_connection_str(conn_str, credential, "dfs") return cls(account_url, file_system_name=file_system_name, credential=credential, **kwargs) @distributed_trace_async async def acquire_lease( - self, lease_duration: int = -1, - lease_id: Optional[str] = None, - **kwargs: Any + self, lease_duration: int = -1, lease_id: Optional[str] = None, **kwargs: Any ) -> DataLakeLeaseClient: """ Requests a new lease. If the file system does not have an active lease, @@ -272,9 +268,7 @@ async def acquire_lease( @distributed_trace_async async def create_file_system( - self, metadata: Optional[Dict[str, str]] = None, - public_access: Optional[PublicAccess] = None, - **kwargs: Any + self, metadata: Optional[Dict[str, str]] = None, public_access: Optional[PublicAccess] = None, **kwargs: Any ) -> Dict[str, Union[str, "datetime"]]: """Creates a new file system under the specified account. @@ -314,12 +308,12 @@ async def create_file_system( :dedent: 16 :caption: Creating a file system in the datalake service. 
""" - encryption_scope_options = kwargs.pop('encryption_scope_options', None) + encryption_scope_options = kwargs.pop("encryption_scope_options", None) return await self._container_client.create_container( metadata=metadata, public_access=public_access, container_encryption_scope=encryption_scope_options, - **kwargs + **kwargs, ) @distributed_trace_async @@ -362,9 +356,15 @@ async def _rename_file_system(self, new_name: str, **kwargs: Any) -> "FileSystem await self._container_client._rename_container(new_name, **kwargs) # pylint: disable=protected-access # TODO: self._raw_credential would not work with SAS tokens renamed_file_system = FileSystemClient( - f"{self.scheme}://{self.primary_hostname}", file_system_name=new_name, - credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, - _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts) + f"{self.scheme}://{self.primary_hostname}", + file_system_name=new_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=self._pipeline, + _location_mode=self._location_mode, + _hosts=self._hosts, + ) return renamed_file_system @distributed_trace_async @@ -443,12 +443,13 @@ async def get_file_system_properties(self, **kwargs: Any) -> FileSystemPropertie :caption: Getting properties on the file system. """ container_properties = await self._container_client.get_container_properties(**kwargs) - return FileSystemProperties._convert_from_container_props(container_properties) # pylint: disable=protected-access + return FileSystemProperties._convert_from_container_props( + container_properties + ) # pylint: disable=protected-access @distributed_trace_async async def set_file_system_metadata( - self, metadata: Dict[str, str], - **kwargs: Any + self, metadata: Dict[str, str], **kwargs: Any ) -> Dict[str, Union[str, "datetime"]]: """Sets one or more user-defined name-value pairs for the specified file system. 
Each call to this operation replaces all existing metadata @@ -502,9 +503,10 @@ async def set_file_system_metadata( @distributed_trace_async async def set_file_system_access_policy( - self, signed_identifiers: Dict[str, "AccessPolicy"], + self, + signed_identifiers: Dict[str, "AccessPolicy"], public_access: Optional[Union[str, "PublicAccess"]] = None, - **kwargs: Any + **kwargs: Any, ) -> Dict[str, Union[str, "datetime"]]: """Sets the permissions for the specified file system or stored access policies that may be used with Shared Access Signatures. The permissions @@ -543,9 +545,7 @@ async def set_file_system_access_policy( :rtype: Dict[str, Union[str, ~datetime.datetime]] """ return await self._container_client.set_container_access_policy( - cast(Dict[str, "BlobAccessPolicy"], signed_identifiers), - public_access=public_access, - **kwargs + cast(Dict[str, "BlobAccessPolicy"], signed_identifiers), public_access=public_access, **kwargs ) @distributed_trace_async @@ -568,16 +568,19 @@ async def get_file_system_access_policy(self, **kwargs: Any) -> Dict[str, Any]: """ access_policy = await self._container_client.get_container_access_policy(**kwargs) return { - 'public_access': PublicAccess._from_generated(access_policy['public_access']), # pylint: disable=protected-access - 'signed_identifiers': access_policy['signed_identifiers'] + "public_access": PublicAccess._from_generated( + access_policy["public_access"] + ), # pylint: disable=protected-access + "signed_identifiers": access_policy["signed_identifiers"], } @distributed_trace def get_paths( - self, path: Optional[str] = None, + self, + path: Optional[str] = None, recursive: Optional[bool] = True, max_results: Optional[int] = None, - **kwargs: Any + **kwargs: Any, ) -> AsyncItemPaged["PathProperties"]: """Returns a generator to list the paths(could be files or directories) under the specified file system. 
The generator will lazily follow the continuation tokens returned by @@ -621,25 +624,18 @@ def get_paths( :dedent: 12 :caption: List the blobs in the file system. """ - timeout = kwargs.pop('timeout', None) + timeout = kwargs.pop("timeout", None) begin_from = kwargs.pop("start_from", None) command = functools.partial( - self._client.file_system.list_paths, - path=path, - timeout=timeout, - begin_from=begin_from, - **kwargs + self._client.file_system.list_paths, path=path, timeout=timeout, begin_from=begin_from, **kwargs ) return AsyncItemPaged( - command, recursive, path=path, max_results=max_results, - page_iterator_class=PathPropertiesPaged, **kwargs + command, recursive, path=path, max_results=max_results, page_iterator_class=PathPropertiesPaged, **kwargs ) @distributed_trace_async async def create_directory( - self, directory: Union[DirectoryProperties, str], - metadata: Optional[Dict[str, str]] = None, - **kwargs: Any + self, directory: Union[DirectoryProperties, str], metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> DataLakeDirectoryClient: """ Create directory @@ -730,8 +726,7 @@ async def create_directory( @distributed_trace_async async def delete_directory( - self, directory: Union[DirectoryProperties, str], - **kwargs: Any + self, directory: Union[DirectoryProperties, str], **kwargs: Any ) -> DataLakeDirectoryClient: """ Marks the specified path for deletion. 
@@ -784,10 +779,7 @@ async def delete_directory( return directory_client @distributed_trace_async - async def create_file( - self, file: Union[FileProperties, str], - **kwargs: Any - ) -> DataLakeFileClient: + async def create_file(self, file: Union[FileProperties, str], **kwargs: Any) -> DataLakeFileClient: """ Create file @@ -884,10 +876,7 @@ async def create_file( return file_client @distributed_trace_async - async def delete_file( - self, file: Union[FileProperties, str], - **kwargs: Any - ) -> DataLakeFileClient: + async def delete_file(self, file: Union[FileProperties, str], **kwargs: Any) -> DataLakeFileClient: """ Marks the specified file for deletion. @@ -938,9 +927,7 @@ async def delete_file( @distributed_trace_async async def _undelete_path( - self, deleted_path_name: str, - deletion_id: str, - **kwargs: Any + self, deleted_path_name: str, deletion_id: str, **kwargs: Any ) -> Union[DataLakeDirectoryClient, DataLakeFileClient]: """Restores soft-deleted path. @@ -968,17 +955,15 @@ async def _undelete_path( pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access ) path_client = AzureDataLakeStorageRESTAPI( - url, - filesystem=self.file_system_name, - path=deleted_path_name, - pipeline=pipeline + url, filesystem=self.file_system_name, path=deleted_path_name, pipeline=pipeline ) try: - is_file = cast(bool, await path_client.path.undelete( - undelete_source=undelete_source, cls=is_file_path, **kwargs)) + is_file = cast( + bool, await path_client.path.undelete(undelete_source=undelete_source, cls=is_file_path, **kwargs) + ) if is_file: return self.get_file_client(deleted_path_name) return self.get_directory_client(deleted_path_name) @@ -991,7 +976,7 @@ def _get_root_directory_client(self) -> 
DataLakeDirectoryClient: :returns: A DataLakeDirectoryClient. :rtype: ~azure.storage.filedatalake.aio.DataLakeDirectoryClient """ - return self.get_directory_client('/') + return self.get_directory_client("/") def get_directory_client(self, directory: Union[DirectoryProperties, str]) -> DataLakeDirectoryClient: """Get a client to interact with the specified directory. @@ -1015,19 +1000,24 @@ def get_directory_client(self, directory: Union[DirectoryProperties, str]) -> Da :caption: Getting the directory client to interact with a specific directory. """ if isinstance(directory, DirectoryProperties): - directory_name = directory.get('name') + directory_name = directory.get("name") else: directory_name = str(directory) _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access + ) + return DataLakeDirectoryClient( + self.url, + self.file_system_name, + directory_name=directory_name, + credential=self._raw_credential, + api_version=self.api_version, + _configuration=self._config, + _pipeline=_pipeline, + _hosts=self._hosts, + loop=self._loop, ) - return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, - credential=self._raw_credential, - api_version=self.api_version, - _configuration=self._config, _pipeline=_pipeline, - _hosts=self._hosts, - loop=self._loop) def get_file_client(self, file_path: Union[FileProperties, str]) -> DataLakeFileClient: """Get a client to interact with the specified file. 
@@ -1056,12 +1046,19 @@ def get_file_client(self, file_path: Union[FileProperties, str]) -> DataLakeFile file_path = str(file_path) _pipeline = AsyncPipeline( transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore [arg-type] # pylint: disable=protected-access + policies=self._pipeline._impl_policies, # type: ignore [arg-type] # pylint: disable=protected-access ) return DataLakeFileClient( - self.url, self.file_system_name, file_path=file_path, credential=self._raw_credential, + self.url, + self.file_system_name, + file_path=file_path, + credential=self._raw_credential, api_version=self.api_version, - _hosts=self._hosts, _configuration=self._config, _pipeline=_pipeline, loop=self._loop) + _hosts=self._hosts, + _configuration=self._config, + _pipeline=_pipeline, + loop=self._loop, + ) @distributed_trace def list_deleted_paths(self, **kwargs: Any) -> AsyncItemPaged[DeletedPathProperties]: @@ -1087,14 +1084,19 @@ def list_deleted_paths(self, **kwargs: Any) -> AsyncItemPaged[DeletedPathPropert :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.filedatalake.DeletedPathProperties] """ - path_prefix = kwargs.pop('path_prefix', None) - timeout = kwargs.pop('timeout', None) - results_per_page = kwargs.pop('results_per_page', None) + path_prefix = kwargs.pop("path_prefix", None) + timeout = kwargs.pop("timeout", None) + results_per_page = kwargs.pop("results_per_page", None) command = functools.partial( self._datalake_client_for_blob_operation.file_system.list_blob_hierarchy_segment, showonly="deleted", timeout=timeout, - **kwargs) + **kwargs, + ) return AsyncItemPaged( - command, prefix=path_prefix, page_iterator_class=DeletedPathPropertiesPaged, - results_per_page=results_per_page, **kwargs) + command, + prefix=path_prefix, + page_iterator_class=DeletedPathPropertiesPaged, + results_per_page=results_per_page, + **kwargs, + ) diff --git 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_list_paths_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_list_paths_helper.py index d65effc98f32..0fd8c59e6242 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_list_paths_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_list_paths_helper.py @@ -4,10 +4,7 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( - Any, Callable, cast, Dict, - List, Optional, Tuple, Union -) +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union from azure.core.exceptions import HttpResponseError from azure.core.async_paging import AsyncPageIterator @@ -15,13 +12,9 @@ from .._deserialize import ( get_deleted_path_properties_from_generated_code, process_storage_error, - return_headers_and_deserialized_path_list -) -from .._generated.models import ( - BlobItemInternal, - BlobPrefix as GenBlobPrefix, - Path + return_headers_and_deserialized_path_list, ) +from .._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, Path from .._models import DeletedPathProperties, PathProperties from .._shared.models import DictMixin from .._shared.response_handlers import return_context_and_deserialized @@ -43,11 +36,11 @@ class DirectoryPrefix(DictMixin): options include "primary" and "secondary".""" def __init__(self, **kwargs: Any) -> None: - self.name = kwargs.get('prefix') # type: ignore [assignment] - self.results_per_page = kwargs.get('results_per_page') # type: ignore [assignment] - self.file_system = kwargs.get('container') # type: ignore [assignment] - self.delimiter = kwargs.get('delimiter') # type: ignore [assignment] - self.location_mode = kwargs.get('location_mode') # type: ignore [assignment] + self.name = kwargs.get("prefix") # type: ignore [assignment] + self.results_per_page = 
kwargs.get("results_per_page") # type: ignore [assignment] + self.file_system = kwargs.get("container") # type: ignore [assignment] + self.delimiter = kwargs.get("delimiter") # type: ignore [assignment] + self.location_mode = kwargs.get("location_mode") # type: ignore [assignment] class DeletedPathPropertiesPaged(AsyncPageIterator): @@ -74,18 +67,17 @@ class DeletedPathPropertiesPaged(AsyncPageIterator): options include "primary" and "secondary".""" def __init__( - self, command: Callable, + self, + command: Callable, container: Optional[str] = None, prefix: Optional[str] = None, results_per_page: Optional[int] = None, continuation_token: Optional[str] = None, delimiter: Optional[str] = None, - location_mode: Optional[str] = None + location_mode: Optional[str] = None, ) -> None: super(DeletedPathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" + get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or "" ) self._command = command self.service_endpoint = None @@ -104,7 +96,7 @@ async def _get_next_cb(self, continuation_token): marker=continuation_token or None, max_results=self.results_per_page, cls=return_context_and_deserialized, - use_location=self.location_mode + use_location=self.location_mode, ) except HttpResponseError as error: process_storage_error(error) @@ -116,7 +108,7 @@ async def _extract_data_cb(self, get_next_return): self.marker = self._response.marker self.results_per_page = self._response.max_results self.container = self._response.container_name - self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items + self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items self.current_page = [self._build_item(item) for item in self.current_page] self.delimiter = self._response.delimiter @@ -132,7 +124,7 @@ def _build_item(self, item): 
container=self.container, prefix=item.name, results_per_page=self.results_per_page, - location_mode=self.location_mode + location_mode=self.location_mode, ) return item @@ -155,17 +147,16 @@ class PathPropertiesPaged(AsyncPageIterator): """The path list to build the items for the current page.""" def __init__( - self, command: Callable, + self, + command: Callable, recursive: bool, path: Optional[str] = None, max_results: Optional[int] = None, continuation_token: Optional[str] = None, - upn: Optional[str] = None + upn: Optional[str] = None, ) -> None: super(PathPropertiesPaged, self).__init__( - get_next=self._get_next_cb, - extract_data=self._extract_data_cb, - continuation_token=continuation_token or "" + get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or "" ) self._command = command self.recursive = recursive @@ -178,12 +169,12 @@ def __init__( async def _get_next_cb(self, continuation_token): try: return await self._command( - self.recursive, + recursive=self.recursive, continuation=continuation_token or None, path=self.path, max_results=self.results_per_page, upn=self.upn, - cls=return_headers_and_deserialized_path_list + cls=return_headers_and_deserialized_path_list, ) except HttpResponseError as error: process_storage_error(error) @@ -192,7 +183,7 @@ async def _extract_data_cb(self, get_next_return): self.path_list, self._response = cast(Tuple[List[Path], Dict[str, Any]], get_next_return) self.current_page = [self._build_item(item) for item in self.path_list] - return self._response['continuation'] or None, self.current_page + return self._response["continuation"] or None, self.current_page @staticmethod def _build_item(item: Union[Path, PathProperties]) -> PathProperties: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_models.py index 347c94b3733c..08203973984f 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_models.py @@ -39,10 +39,7 @@ class FileSystemPropertiesPaged(ContainerPropertiesPaged): """The current page of listed results.""" def __init__(self, *args: Any, **kwargs: Any) -> None: - super(FileSystemPropertiesPaged, self).__init__( - *args, - **kwargs - ) + super(FileSystemPropertiesPaged, self).__init__(*args, **kwargs) @staticmethod def _build_item(item): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 27affaff74f3..81a917cd6f04 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -6,10 +6,7 @@ # pylint: disable=docstring-keyword-should-match-keyword-only from datetime import datetime -from typing import ( - Any, Awaitable, Callable, cast, Dict, Optional, Union, - TYPE_CHECKING -) +from typing import Any, Awaitable, Callable, cast, Dict, Optional, Union, TYPE_CHECKING from typing_extensions import Self from azure.core.exceptions import AzureError, HttpResponseError @@ -34,7 +31,7 @@ _parse_url, _rename_path_options, _set_access_control_options, - _set_access_control_recursive_options + _set_access_control_recursive_options, ) from .._serialize import compare_api_versions, convert_dfs_url_to_blob_url, get_api_version from .._shared.base_client import parse_query, StorageAccountHostsMixin @@ -79,21 +76,25 @@ class PathClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): # ty authentication. Only has an effect when credential is of type AsyncTokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. 
""" + def __init__( - self, account_url: str, + self, + account_url: str, file_system_name: str, path_name: str, - credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + credential: Optional[ + Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + ] = None, # pylint: disable=line-too-long **kwargs: Any ) -> None: - kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) + kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) # remove the preceding/trailing delimiter from the path components - file_system_name = file_system_name.strip('/') + file_system_name = file_system_name.strip("/") # the name of root directory is / - if path_name != '/': - path_name = path_name.strip('/') + if path_name != "/": + path_name = path_name.strip("/") if not (file_system_name and path_name): raise ValueError("Please specify a file system name and file path.") @@ -102,14 +103,11 @@ def __init__( blob_account_url = convert_dfs_url_to_blob_url(account_url) self._blob_account_url = blob_account_url - datalake_hosts = kwargs.pop('_hosts', None) + datalake_hosts = kwargs.pop("_hosts", None) blob_hosts = None if datalake_hosts: blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY]) - blob_hosts = { - LocationMode.PRIMARY: blob_primary_account_url, - LocationMode.SECONDARY: "" - } + blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""} self._blob_client = BlobClient( account_url=blob_account_url, container_name=file_system_name, @@ -126,11 +124,7 @@ def __init__( self._query_str, self._raw_credential = self._format_query_string(sas_token, credential) super(PathClient, self).__init__( - parsed_url, - service='dfs', - credential=self._raw_credential, - _hosts=datalake_hosts, - **kwargs + parsed_url, 
service="dfs", credential=self._raw_credential, _hosts=datalake_hosts, **kwargs ) # ADLS doesn't support secondary endpoint, make sure it's empty @@ -138,7 +132,7 @@ def __init__( self._api_version = get_api_version(kwargs) self._client = self._build_generated_client(self.url) self._datalake_client_for_blob_operation = self._build_generated_client(self._blob_client.url) - self._loop = kwargs.get('loop', None) + self._loop = kwargs.get("loop", None) async def __aenter__(self) -> Self: await self._client.__aenter__() @@ -165,11 +159,7 @@ async def close(self) -> None: # type: ignore def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( - url, - base_url=url, - file_system=self.file_system_name, - path=self.path_name, - pipeline=self._pipeline + url, base_url=url, file_system=self.file_system_name, path=self.path_name, pipeline=self._pipeline ) client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client @@ -178,7 +168,8 @@ def _format_url(self, hostname: str) -> str: return _format_url(self.scheme, hostname, self.file_system_name, self.path_name, self._query_str) async def _create( - self, resource_type: str, + self, + resource_type: str, content_settings: Optional["ContentSettings"] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any @@ -272,8 +263,8 @@ async def _create( :return: A dictionary of response headers. 
:rtype: Dict[str, Union[str, ~datetime.datetime]] """ - lease_id = kwargs.get('lease_id', None) - lease_duration = kwargs.get('lease_duration', None) + lease_id = kwargs.get("lease_id", None) + lease_duration = kwargs.get("lease_duration", None) if lease_id and not lease_duration: raise ValueError("Please specify a lease_id and a lease_duration.") if lease_duration and not lease_id: @@ -321,19 +312,21 @@ async def _delete(self, **kwargs: Any) -> Dict[str, Any]: # Perform paginated delete only if using OAuth, deleting a directory, and api version is 2023-08-03 or later # The pagination is only for ACL checks, the final request remains the atomic delete operation paginated = None - if (compare_api_versions(self.api_version, '2023-08-03') >= 0 and - hasattr(self.credential, 'get_token') and - kwargs.get('recursive')): # Directory delete will always specify recursive + if ( + compare_api_versions(self.api_version, "2023-08-03") >= 0 + and hasattr(self.credential, "get_token") + and kwargs.get("recursive") + ): # Directory delete will always specify recursive paginated = True options = _delete_path_options(paginated, **kwargs) try: response_headers = await self._client.path.delete(**options) # Loop until continuation token is None for paginated delete - while response_headers['continuation']: + while response_headers["continuation"]: response_headers = await self._client.path.delete( - continuation=response_headers['continuation'], - **options) + continuation=response_headers["continuation"], **options + ) return response_headers except HttpResponseError as error: @@ -341,7 +334,8 @@ async def _delete(self, **kwargs: Any) -> Dict[str, Any]: @distributed_trace_async async def set_access_control( - self, owner: Optional[str] = None, + self, + owner: Optional[str] = None, group: Optional[str] = None, permissions: Optional[str] = None, acl: Optional[str] = None, @@ -509,11 +503,12 @@ async def set_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessC if not 
acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - max_batches = kwargs.pop('max_batches', None) - options = _set_access_control_recursive_options(mode='set', acl=acl, **kwargs) - return await self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="set", acl=acl, **kwargs) + return await self._set_access_control_internal( + options=options, progress_hook=progress_hook, max_batches=max_batches + ) @distributed_trace_async async def update_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessControlChangeResult: @@ -563,11 +558,12 @@ async def update_access_control_recursive(self, acl: str, **kwargs: Any) -> Acce if not acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - max_batches = kwargs.pop('max_batches', None) - options = _set_access_control_recursive_options(mode='modify', acl=acl, **kwargs) - return await self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="modify", acl=acl, **kwargs) + return await self._set_access_control_internal( + options=options, progress_hook=progress_hook, max_batches=max_batches + ) @distributed_trace_async async def remove_access_control_recursive(self, acl: str, **kwargs: Any) -> AccessControlChangeResult: @@ -615,19 +611,21 @@ async def remove_access_control_recursive(self, acl: str, **kwargs: Any) -> Acce if not acl: raise ValueError("The Access Control List must be set for this operation") - progress_hook = kwargs.pop('progress_hook', None) - 
max_batches = kwargs.pop('max_batches', None) - options = _set_access_control_recursive_options(mode='remove', acl=acl, **kwargs) - return await self._set_access_control_internal(options=options, progress_hook=progress_hook, - max_batches=max_batches) + progress_hook = kwargs.pop("progress_hook", None) + max_batches = kwargs.pop("max_batches", None) + options = _set_access_control_recursive_options(mode="remove", acl=acl, **kwargs) + return await self._set_access_control_internal( + options=options, progress_hook=progress_hook, max_batches=max_batches + ) async def _set_access_control_internal( - self, options: Dict[str, Any], + self, + options: Dict[str, Any], progress_hook: Optional[Callable[[AccessControlChanges], Awaitable[Any]]], - max_batches: Optional[int] = None + max_batches: Optional[int] = None, ) -> AccessControlChangeResult: try: - continue_on_failure = options.get('force_flag') + continue_on_failure = options.get("force_flag") total_directories_successful = 0 total_files_success = 0 total_failure_count = 0 @@ -643,44 +641,56 @@ async def _set_access_control_internal( total_files_success += resp.files_successful total_failure_count += resp.failure_count batch_count += 1 - current_continuation_token = headers['continuation'] + current_continuation_token = headers["continuation"] if current_continuation_token is not None: last_continuation_token = current_continuation_token if progress_hook is not None: - await progress_hook(AccessControlChanges( - batch_counters=AccessControlChangeCounters( - directories_successful=resp.directories_successful, - files_successful=resp.files_successful, - failure_count=resp.failure_count, - ), - aggregate_counters=AccessControlChangeCounters( - directories_successful=total_directories_successful, - files_successful=total_files_success, - failure_count=total_failure_count, - ), - batch_failures=[AccessControlChangeFailure( - name=failure.name, - is_directory=failure.type == 'DIRECTORY', - 
error_message=failure.error_message) for failure in resp.failed_entries], - continuation=last_continuation_token - )) + await progress_hook( + AccessControlChanges( + batch_counters=AccessControlChangeCounters( + directories_successful=resp.directories_successful, + files_successful=resp.files_successful, + failure_count=resp.failure_count, + ), + aggregate_counters=AccessControlChangeCounters( + directories_successful=total_directories_successful, + files_successful=total_files_success, + failure_count=total_failure_count, + ), + batch_failures=[ + AccessControlChangeFailure( + name=failure.name, + is_directory=failure.type == "DIRECTORY", + error_message=failure.error_message, + ) + for failure in resp.failed_entries + ], + continuation=last_continuation_token, + ) + ) # update the continuation token, if there are more operations that cannot be completed in a single call - max_batches_satisfied = (max_batches is not None and batch_count == max_batches) + max_batches_satisfied = max_batches is not None and batch_count == max_batches continue_operation = bool(current_continuation_token) and not max_batches_satisfied - options['continuation'] = current_continuation_token + options["continuation"] = current_continuation_token # currently the service stops on any failure, so we should send back the last continuation token # for the user to retry the failed updates # otherwise we should just return what the service gave us - return AccessControlChangeResult(counters=AccessControlChangeCounters( - directories_successful=total_directories_successful, - files_successful=total_files_success, - failure_count=total_failure_count), - continuation=last_continuation_token - if total_failure_count > 0 and not continue_on_failure else current_continuation_token) + return AccessControlChangeResult( + counters=AccessControlChangeCounters( + directories_successful=total_directories_successful, + files_successful=total_files_success, + failure_count=total_failure_count, + ), + 
continuation=( + last_continuation_token + if total_failure_count > 0 and not continue_on_failure + else current_continuation_token + ), + ) except HttpResponseError as error: error.continuation_token = last_continuation_token process_storage_error(error) @@ -800,11 +810,11 @@ async def _get_path_properties(self, **kwargs: Any) -> Union[DirectoryProperties and system properties for the file or directory. :rtype: DirectoryProperties or FileProperties """ - upn = kwargs.pop('upn', None) + upn = kwargs.pop("upn", None) if upn: - headers = kwargs.pop('headers', {}) - headers['x-ms-upn'] = str(upn) - kwargs['headers'] = headers + headers = kwargs.pop("headers", {}) + headers["x-ms-upn"] = str(upn) + kwargs["headers"] = headers path_properties = await self._blob_client.get_blob_properties(**kwargs) return cast(Union[DirectoryProperties, FileProperties], path_properties) @@ -871,8 +881,7 @@ async def set_metadata(self, metadata: Dict[str, str], **kwargs: Any) -> Dict[st @distributed_trace_async async def set_http_headers( - self, content_settings: Optional["ContentSettings"] = None, - **kwargs: Any + self, content_settings: Optional["ContentSettings"] = None, **kwargs: Any ) -> Dict[str, Any]: """Sets system properties on the file or directory. @@ -914,9 +923,7 @@ async def set_http_headers( @distributed_trace_async async def acquire_lease( - self, lease_duration: int = -1, - lease_id: Optional[str] = None, - **kwargs: Any + self, lease_duration: int = -1, lease_id: Optional[str] = None, **kwargs: Any ) -> DataLakeLeaseClient: """ Requests a new lease. 
If the file or directory does not have an active lease, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py index ebef79ee1514..8c50e1f41519 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py @@ -4,20 +4,13 @@ # license information. # -------------------------------------------------------------------------- -from typing import ( - Any, cast, Dict, IO, Optional, - TYPE_CHECKING -) +from typing import Any, cast, Dict, IO, Optional, TYPE_CHECKING from azure.core.exceptions import HttpResponseError from .._deserialize import process_storage_error from .._shared.response_handlers import return_response_headers -from .._shared.uploads_async import ( - DataLakeFileChunkUploader, - upload_data_chunks, - upload_substream_blocks -) +from .._shared.uploads_async import DataLakeFileChunkUploader, upload_data_chunks, upload_substream_blocks if TYPE_CHECKING: from .._generated.aio.operations import PathOperations @@ -25,12 +18,14 @@ def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument - return any([ - modified_access_conditions.if_modified_since, - modified_access_conditions.if_unmodified_since, - modified_access_conditions.if_none_match, - modified_access_conditions.if_match - ]) + return any( + [ + modified_access_conditions.if_modified_since, + modified_access_conditions.if_unmodified_since, + modified_access_conditions.if_none_match, + modified_access_conditions.if_match, + ] + ) async def upload_datalake_file( @@ -46,46 +41,54 @@ async def upload_datalake_file( try: if length == 0: return {} - properties = kwargs.pop('properties', None) - umask = kwargs.pop('umask', None) - permissions = kwargs.pop('permissions', None) - path_http_headers = 
kwargs.pop('path_http_headers', None) - modified_access_conditions = kwargs.pop('modified_access_conditions', None) - chunk_size = kwargs.pop('chunk_size', 100 * 1024 * 1024) - encryption_context = kwargs.pop('encryption_context', None) - progress_hook = kwargs.pop('progress_hook', None) + properties = kwargs.pop("properties", None) + umask = kwargs.pop("umask", None) + permissions = kwargs.pop("permissions", None) + path_http_headers = kwargs.pop("path_http_headers", None) + modified_access_conditions = kwargs.pop("modified_access_conditions", None) + chunk_size = kwargs.pop("chunk_size", 100 * 1024 * 1024) + encryption_context = kwargs.pop("encryption_context", None) + progress_hook = kwargs.pop("progress_hook", None) if not overwrite: # if customers didn't specify access conditions, they cannot flush data to existing file if not _any_conditions(modified_access_conditions): - modified_access_conditions.if_none_match = '*' + modified_access_conditions.if_none_match = "*" if properties or umask or permissions: raise ValueError("metadata, umask and permissions can be set only when overwrite is enabled") if overwrite: - response = cast(Dict[str, Any], await client.create( - resource='file', - path_http_headers=path_http_headers, - properties=properties, - modified_access_conditions=modified_access_conditions, - umask=umask, - permissions=permissions, - encryption_context=encryption_context, - cls=return_response_headers, - **kwargs - )) + response = cast( + Dict[str, Any], + await client.create( + resource="file", + path_http_headers=path_http_headers, + properties=properties, + modified_access_conditions=modified_access_conditions, + umask=umask, + permissions=permissions, + encryption_context=encryption_context, + cls=return_response_headers, + **kwargs + ), + ) # this modified_access_conditions will be applied to flush_data to make sure # no other flush between create and the current flush - modified_access_conditions.if_match = response['etag'] + 
modified_access_conditions.if_match = response["etag"] modified_access_conditions.if_none_match = None modified_access_conditions.if_modified_since = None modified_access_conditions.if_unmodified_since = None - use_original_upload_path = file_settings.use_byte_buffer or \ - validate_content or chunk_size < file_settings.min_large_chunk_upload_threshold or \ - hasattr(stream, 'seekable') and not stream.seekable() or \ - not hasattr(stream, 'seek') or not hasattr(stream, 'tell') + use_original_upload_path = ( + file_settings.use_byte_buffer + or validate_content + or chunk_size < file_settings.min_large_chunk_upload_threshold + or hasattr(stream, "seekable") + and not stream.seekable() + or not hasattr(stream, "seek") + or not hasattr(stream, "tell") + ) if use_original_upload_path: await upload_data_chunks( @@ -112,13 +115,16 @@ async def upload_datalake_file( **kwargs ) - return cast(Dict[str, Any], await client.flush_data( - position=length, - path_http_headers=path_http_headers, - modified_access_conditions=modified_access_conditions, - close=True, - cls=return_response_headers, - **kwargs - )) + return cast( + Dict[str, Any], + await client.flush_data( + position=length, + path_http_headers=path_http_headers, + modified_access_conditions=modified_access_conditions, + close=True, + cls=return_response_headers, + **kwargs + ), + ) except HttpResponseError as error: process_storage_error(error) diff --git a/sdk/storage/azure-storage-file-datalake/conftest.py b/sdk/storage/azure-storage-file-datalake/conftest.py index b830bd31a95b..28cde3559001 100644 --- a/sdk/storage/azure-storage-file-datalake/conftest.py +++ b/sdk/storage/azure-storage-file-datalake/conftest.py @@ -16,6 +16,8 @@ add_uri_string_sanitizer, test_proxy, remove_batch_sanitizers, + add_remove_header_sanitizer, + set_custom_default_matcher, ) # Ignore async tests for PyPy @@ -23,6 +25,7 @@ if platform.python_implementation() == "PyPy": collect_ignore_glob.append("tests/*_async.py") + 
@pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") @@ -41,3 +44,6 @@ def add_sanitizers(test_proxy): # Remove the following sanitizers since certain fields are needed in tests and are non-sensitive: # - AZSDK3493: $..name remove_batch_sanitizers(["AZSDK3493"]) + + add_remove_header_sanitizer(headers="Accept") + set_custom_default_matcher(ignore_query_ordering=True) diff --git a/sdk/storage/azure-storage-file-datalake/pyproject.toml b/sdk/storage/azure-storage-file-datalake/pyproject.toml index 1f4a580e95d4..e99eb52be8c1 100644 --- a/sdk/storage/azure-storage-file-datalake/pyproject.toml +++ b/sdk/storage/azure-storage-file-datalake/pyproject.toml @@ -1,3 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +[build-system] +requires = ["setuptools>=77.0.3", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-storage-file-datalake" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Corporation Azure Storage File Datalake Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = ["azure", "azure sdk"] + +dependencies = [ + "isodate>=0.6.1", + "azure-core>=1.38.3", + "typing-extensions>=4.6.0", +] +dynamic = [ +"version", "readme" +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic] +version = {attr = "azure.storage.filedatalake._generated._version.VERSION"} +readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.storage", + "azure.storage.filedatalake", +] + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + [tool.azure-sdk-build] mypy = true pyright = false diff --git a/sdk/storage/azure-storage-file-datalake/setup.py b/sdk/storage/azure-storage-file-datalake/setup.py deleted file mode 100644 index 7e71001d427c..000000000000 --- a/sdk/storage/azure-storage-file-datalake/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft 
Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - - -import os -import re - -from setuptools import setup, find_packages - - -# Change the PACKAGE_NAME only to change folder and different name -PACKAGE_NAME = "azure-storage-file-datalake" -NAMESPACE_NAME = "azure.storage.filedatalake" -PACKAGE_PPRINT_NAME = "Azure File DataLake Storage" - -# a-b-c => a/b/c -package_folder_path = NAMESPACE_NAME.replace('.', '/') - -# azure-storage v0.36.0 and prior are not compatible with this package -try: - import azure.storage - - try: - ver = azure.storage.__version__ - raise Exception( - f'This package is incompatible with azure-storage=={ver}. ' + - ' Uninstall it with "pip uninstall azure-storage".' - ) - except AttributeError: - pass -except ImportError: - pass - -# Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', - fd.read(), re.MULTILINE).group(1) - -if not version: - raise RuntimeError('Cannot find version information') - -setup( - name=PACKAGE_NAME, - version=version, - include_package_data=True, - description=f'Microsoft {PACKAGE_PPRINT_NAME} Client Library for Python', - long_description=open('README.md', 'r').read(), - long_description_content_type='text/markdown', - license='MIT License', - author='Microsoft Corporation', - author_email='ascl@microsoft.com', - url='https://github.com/Azure/azure-sdk-for-python', - keywords="azure, azure sdk", - classifiers=[ - 'Development Status :: 4 - Beta', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python 
:: 3.12', - 'Programming Language :: Python :: 3.13', - 'Programming Language :: Python :: 3.14', - 'License :: OSI Approved :: MIT License', - ], - zip_safe=False, - packages=find_packages(exclude=[ - # Exclude packages that will be covered by PEP420 or nspkg - 'azure', - 'azure.storage', - 'tests', - ]), - python_requires=">=3.9", - install_requires=[ - "azure-core>=1.37.0", - "azure-storage-blob>=12.30.0b1", - "typing-extensions>=4.6.0", - "isodate>=0.6.1" - ], - extras_require={ - "aio": [ - "azure-core[aio]>=1.37.0", - ], - }, -) diff --git a/sdk/storage/azure-storage-file-datalake/tsp-location.yaml b/sdk/storage/azure-storage-file-datalake/tsp-location.yaml new file mode 100644 index 000000000000..ab9d2589f874 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/storage/data-plane/DataLakeStorage +commit: +repo: +additionalDirectories: