diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
index ce4f738e1a48..90049ff88e32 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py
@@ -540,8 +540,9 @@ def upload_blob(
             value specified in this header, the request will fail with
             MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use when the blob size exceeds
-            64MB.
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
             Encrypts the data on the service-side with the given key.
             Use of customer-provided keys must be done over HTTPS.
@@ -695,7 +696,9 @@ def download_blob(
             As the encryption key itself is provided in the request,
             a secure connection must be established to transfer the key.
         :keyword int max_concurrency:
-            The number of parallel connections with which to download.
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword Optional[str] encoding:
             Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
         :keyword progress_hook:
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
index 5d522e318983..7cb074487f58 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py
@@ -530,8 +530,9 @@ async def upload_blob(
             value specified in this header, the request will fail with
             MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use when the blob size exceeds
-            64MB.
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
             Encrypts the data on the service-side with the given key.
             Use of customer-provided keys must be done over HTTPS.
@@ -687,7 +688,9 @@ async def download_blob(
             As the encryption key itself is provided in the request,
             a secure connection must be established to transfer the key.
         :keyword int max_concurrency:
-            The number of parallel connections with which to download.
+            Maximum number of parallel connections to use when transferring the blob in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword str encoding:
             Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
         :keyword progress_hook:
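For context, a minimal sketch of how the two knobs interact for the blob clients. It assumes the default requests-based transport; the account URL, container/blob names, credential, and pool size of 16 are placeholders, not values from this change. max_concurrency bounds how many chunks of a single transfer are in flight, while the connection pool is sized separately on the transport.

import requests
from requests.adapters import HTTPAdapter
from azure.core.pipeline.transport import RequestsTransport
from azure.storage.blob import BlobClient

# Size the HTTP connection pool on the transport; max_concurrency does not do this.
session = requests.Session()
session.mount("https://", HTTPAdapter(pool_connections=16, pool_maxsize=16))

blob_client = BlobClient(
    account_url="https://<account>.blob.core.windows.net",  # placeholder
    container_name="my-container",                          # placeholder
    blob_name="large-file.bin",                             # placeholder
    credential="<account-key-or-sas>",                      # placeholder
    transport=RequestsTransport(session=session),
)

with open("large-file.bin", "rb") as data:
    # Up to 4 chunks of this one blob are uploaded in parallel; the pool above is untouched.
    blob_client.upload_blob(data, overwrite=True, max_concurrency=4)

with open("large-file-copy.bin", "wb") as out:
    # Same knob on the download path.
    blob_client.download_blob(max_concurrency=4).readinto(out)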
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
index 214b4088acc1..a9777caf316e 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
@@ -493,6 +493,10 @@ def upload_data(
             see `here `_.
             This method may make multiple calls to the service and
             the timeout will apply to each call individually.
+        :keyword int max_concurrency:
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword int chunk_size:
             The maximum chunk size for uploading a file in chunks.
             Defaults to 100*1024*1024, or 100MB.
@@ -775,7 +779,9 @@ def download_file(self, offset=None, length=None, **kwargs):
             Use of customer-provided keys must be done over HTTPS.
             Required if the file was created with a Customer-Provided Key.
         :keyword int max_concurrency:
-            The number of parallel connections with which to download.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword int timeout:
             Sets the server-side timeout for the operation in seconds. For more details see
             https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
index 8bc360763a5b..6aa4f77d03ec 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
@@ -406,6 +406,10 @@ async def upload_data(
             see `here `_.
             This method may make multiple calls to the service and
             the timeout will apply to each call individually.
+        :keyword int max_concurrency:
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword int chunk_size:
             The maximum chunk size for uploading a file in chunks.
             Defaults to 100*1024*1024, or 100MB.
@@ -623,7 +627,9 @@ async def download_file(self, offset=None, length=None, **kwargs):
             Use of customer-provided keys must be done over HTTPS.
             Required if the file was created with a Customer-Provided Key.
         :keyword int max_concurrency:
-            The number of parallel connections with which to download.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword int timeout:
             Sets the server-side timeout for the operation in seconds. For more details see
             https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
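The same pattern applies to DataLakeFileClient, sketched below with placeholder names and an illustrative chunk_size; the connection pool would be configured on the transport exactly as in the blob sketch above, since max_concurrency only parallelizes the chunked transfer of a single call.

from azure.storage.filedatalake import DataLakeFileClient

file_client = DataLakeFileClient(
    account_url="https://<account>.dfs.core.windows.net",  # placeholder
    file_system_name="my-filesystem",                      # placeholder
    file_path="datasets/large-file.bin",                   # placeholder
    credential="<account-key-or-sas>",                     # placeholder
)

with open("large-file.bin", "rb") as data:
    # Chunked upload: up to 8 chunks in flight, 16 MiB per chunk (illustrative values).
    file_client.upload_data(data, overwrite=True, max_concurrency=8, chunk_size=16 * 1024 * 1024)

# Chunked download with the same parallelism bound.
downloaded = file_client.download_file(max_concurrency=8).readall()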
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
index cb9c7ad43fb7..9610b070fc98 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py
@@ -535,7 +535,9 @@ def upload_file(
             already validate. Note that this MD5 hash is not stored with the
             file.
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword lease:
             Required if the file has an active lease. Value can be a ShareLeaseClient object
             or the lease ID as a string.
@@ -805,7 +807,9 @@ def download_file(
             Number of bytes to read from the stream. This is optional, but
             should be supplied for optimal performance.
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword bool validate_content:
             If true, calculates an MD5 hash for each chunk of the file. The storage
             service checks the hash of the content that has arrived with the hash
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
index 4a5d4ccf34e2..09a353ac2d5f 100644
--- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
+++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py
@@ -532,7 +532,9 @@ async def upload_file(
             already validate. Note that this MD5 hash is not stored with the
             file.
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword str encoding:
             Defaults to UTF-8.
         :keyword lease:
@@ -804,7 +806,9 @@ async def download_file(
             Number of bytes to read from the stream. This is optional, but
             should be supplied for optimal performance.
         :keyword int max_concurrency:
-            Maximum number of parallel connections to use.
+            Maximum number of parallel connections to use when transferring the file in chunks.
+            This option does not affect the underlying connection pool, and may
+            require a separate configuration of the connection pool.
         :keyword bool validate_content:
             If true, calculates an MD5 hash for each chunk of the file. The storage
             service checks the hash of the content that has arrived with the hash
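For the async clients the separation is the same, only the pool lives on the aiohttp connector rather than a requests session. A sketch with the async ShareFileClient, again with placeholder names, URLs, and an illustrative connector limit of 16:

import asyncio
import aiohttp
from azure.core.pipeline.transport import AioHttpTransport
from azure.storage.fileshare.aio import ShareFileClient

async def main():
    # Connection pool sized via aiohttp's connector; we keep ownership and close it ourselves.
    session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(limit=16))
    transport = AioHttpTransport(session=session, session_owner=False)

    file_client = ShareFileClient(
        account_url="https://<account>.file.core.windows.net",  # placeholder
        share_name="my-share",                                   # placeholder
        file_path="large-file.bin",                              # placeholder
        credential="<sas-token>",                                # placeholder
        transport=transport,
    )
    async with file_client:
        with open("large-file.bin", "rb") as data:
            # Up to 4 ranges of this file uploaded concurrently; pool size above is independent.
            await file_client.upload_file(data, max_concurrency=4)
        downloader = await file_client.download_file(max_concurrency=4)
        contents = await downloader.readall()
    await session.close()

asyncio.run(main())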