[Storage][Blob][Fix] Fix get_block_list bug #18751

Merged · 1 commit · Jun 28, 2021
12 changes: 10 additions & 2 deletions sdk/storage/azure-storage-blob/azure/storage/blob/_models.py
@@ -12,7 +12,7 @@
 from azure.core.exceptions import HttpResponseError
 from ._generated.models import ArrowField

-from ._shared import decode_base64_to_text
+from ._shared import decode_base64_to_bytes
 from ._shared.response_handlers import return_context_and_deserialized, process_storage_error
 from ._shared.models import DictMixin, get_enum_value
 from ._generated.models import Logging as GeneratedLogging
@@ -739,7 +739,15 @@ def __init__(self, block_id, state=BlockState.Latest):

     @classmethod
     def _from_generated(cls, generated):
-        block = cls(decode_base64_to_text(generated.name))
+        try:
+            decoded_bytes = decode_base64_to_bytes(generated.name)
+            block_id = decoded_bytes.decode('utf-8')
+        # This fixes a bug: large blocks staged through upload_blob are not base64 encoded, while
+        # the service expects base64 encoded block ids. If the returned block id cannot be decoded,
+        # it was never encoded when the block was staged, so we use the returned block_id directly.
+        except UnicodeDecodeError:
+            block_id = generated.name
+        block = cls(block_id)
         block.size = generated.size
         return block
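For readers outside the diff, here is a minimal standalone sketch of the decode-with-fallback pattern the fix applies. It uses the standard-library base64 module in place of the SDK-internal decode_base64_to_bytes helper, and the sample ids are made up; note the merged code only catches UnicodeDecodeError, while this sketch also guards against input that is not valid base64 at all.

import base64

def resolve_block_id(name):
    # Try to treat the service-returned name as a base64 encoded block id;
    # if it is not valid base64, or does not decode to UTF-8 text, assume it
    # was staged un-encoded and return it unchanged.
    try:
        return base64.b64decode(name).decode('utf-8')
    except (ValueError, UnicodeDecodeError):
        return name

print(resolve_block_id(base64.b64encode(b'block-0001').decode()))  # -> block-0001
print(resolve_block_id('BlockId00042'))  # UTF-8 decode fails, falls back to the raw id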

10 changes: 8 additions & 2 deletions sdk/storage/azure-storage-blob/tests/test_large_block_blob.py
@@ -51,7 +51,10 @@ def _setup(self, storage_account, key):
         self.container_name = self.get_resource_name('utcontainer')

         if self.is_live:
-            self.bsc.create_container(self.container_name)
+            try:
+                self.bsc.create_container(self.container_name)
+            except:
+                pass

     def _teardown(self, file_name):
         if path.isfile(file_name):
@@ -170,9 +173,12 @@ def test_create_large_blob_from_path(self, resource_group, location, storage_acc

         # Act
         with open(FILE_PATH, 'rb') as stream:
-            blob.upload_blob(stream, max_concurrency=2)
+            blob.upload_blob(stream, max_concurrency=2, overwrite=True)
+
+        block_list = blob.get_block_list()

         # Assert
+        self.assertIsNot(len(block_list), 0)
         self.assertBlobEqual(self.container_name, blob_name, data)
         self._teardown(FILE_PATH)
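As a usage-level illustration of the path the test now covers, the following hedged sketch (the connection string, container, blob, and file names are placeholders) uploads a stream large enough to be chunked into staged blocks and then lists them; before this fix, get_block_list could fail while decoding block ids that upload_blob had staged without base64 encoding.

import os
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient.from_connection_string(os.environ["AZURE_STORAGE_CONNECTION_STRING"])
blob = service.get_blob_client("utcontainer", "largeblob")

# Upload a file large enough to be split into multiple staged blocks.
with open("large_file.bin", "rb") as stream:
    blob.upload_blob(stream, max_concurrency=2, overwrite=True)

# get_block_list returns a (committed, uncommitted) pair of BlobBlock lists.
committed, uncommitted = blob.get_block_list("all")
print(len(committed), "committed blocks")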

sdk/storage/azure-storage-blob/tests/test_large_block_blob_async.py
@@ -199,9 +199,12 @@ async def test_create_large_blob_from_path_async(self, resource_group, location,
         # Act
         try:
             with open(FILE_PATH, 'rb') as stream:
-                await blob.upload_blob(stream, max_concurrency=2)
+                await blob.upload_blob(stream, max_concurrency=2, overwrite=True)
+
+            block_list = await blob.get_block_list()

             # Assert
+            self.assertIsNot(len(block_list), 0)
             await self.assertBlobEqual(self.container_name, blob_name, data)
         finally:
             self._teardown(FILE_PATH)
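The async test exercises the same path through the aio client; a sketch under the same placeholder assumptions as the synchronous example above:

import asyncio
import os
from azure.storage.blob.aio import BlobServiceClient

async def main():
    service = BlobServiceClient.from_connection_string(os.environ["AZURE_STORAGE_CONNECTION_STRING"])
    async with service:
        blob = service.get_blob_client("utcontainer", "largeblob")
        with open("large_file.bin", "rb") as stream:
            await blob.upload_blob(stream, max_concurrency=2, overwrite=True)
        # Same (committed, uncommitted) return shape as the sync client.
        committed, _ = await blob.get_block_list("all")
        print(len(committed), "committed blocks")

asyncio.run(main())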