diff --git a/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py b/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py
index 237e0d112bf..472ded350bc 100644
--- a/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py
+++ b/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py
@@ -415,9 +415,9 @@ def storage_blob_upload_batch(cmd, client, source, destination, pattern=None, #
                               maxsize_condition=None, max_connections=2, lease_id=None,
                               progress_callback=None, if_modified_since=None, if_unmodified_since=None,
                               if_match=None, if_none_match=None, timeout=None, dryrun=False, socket_timeout=None, **kwargs):
-    def _create_return_result(blob_content_settings, upload_result=None):
+    def _create_return_result(blob_content_settings, blob_client, upload_result=None):
         return {
-            'Blob': client.url,
+            'Blob': blob_client.url,
             'Type': blob_content_settings.content_type,
             'Last Modified': upload_result['last_modified'] if upload_result else None,
             'eTag': upload_result['etag'] if upload_result else None}
@@ -434,8 +434,11 @@ def _create_return_result(blob_content_settings, upload_result=None):
         logger.info('      total %d', len(source_files))
         results = []
         for src, dst in source_files:
+            blob_client = client.get_blob_client(container=destination_container_name,
+                                                 blob=normalize_blob_file_path(destination_path, dst))
             results.append(_create_return_result(blob_content_settings=guess_content_type(src, content_settings,
-                                                                                           t_content_settings)))
+                                                                                           t_content_settings),
+                                                 blob_client=blob_client))
     else:
         @check_precondition_success
         def _upload_blob(*args, **kwargs):
@@ -467,7 +470,7 @@ def _upload_blob(*args, **kwargs):
                                       if_none_match=if_none_match, timeout=timeout, **kwargs)
                 if include:
                     results.append(_create_return_result(blob_content_settings=guessed_content_settings,
-                                                         upload_result=result))
+                                                         blob_client=blob_client, upload_result=result))
             except (ResourceModifiedError, AzureResponseError) as ex:
                 logger.error(ex)
 
diff --git a/src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_batch_operations.py b/src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_batch_operations.py
index 88d924d91a1..4951450cc2e 100644
--- a/src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_batch_operations.py
+++ b/src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_batch_operations.py
@@ -124,6 +124,8 @@ def test_storage_blob_batch_upload_scenarios(self, test_dir, storage_account_inf
         self.storage_cmd('storage blob list -c {} --prefix some_dir', storage_account_info,
                          container).assert_with_checks(JMESPathCheck('length(@)', 4))
 
+        # upload-batch with precondition
+        container = self.create_container(storage_account_info)
         self.storage_cmd('storage blob upload-batch -d {} -s "{}"', storage_account_info, container, test_dir)
 
         import time
@@ -142,6 +144,19 @@ def test_storage_blob_batch_upload_scenarios(self, test_dir, storage_account_inf
                                   container, test_dir, current).get_output_in_json()
         self.assertEqual(len(result), 41)
 
+        # check result url
+        container = self.create_container(storage_account_info)
+        result = self.storage_cmd('storage blob upload-batch -s "{}" -d {}', storage_account_info,
+                                  test_dir, container).get_output_in_json()
+        if result and result[0]:
+            res = result[0]
+            self.assertRegex(res['Blob'], r'^.*[^/]+$')
+            base_url = res['Blob'].split('/')[:3]
+            container = res['Blob'].split('/')[3]
+            blob_name = '/'.join(res['Blob'].split('/')[4:])
+            self.storage_cmd('storage blob show -c {} -n {}', storage_account_info,
+                             container, blob_name)
+
     @ResourceGroupPreparer()
     @StorageAccountPreparer()
     @StorageTestFilesPreparer()
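
Note (illustrative only, not part of the patch): the change above derives the reported 'Blob' URL from a per-blob client obtained via get_blob_client, instead of reusing the URL of the client passed into the command. A minimal sketch of the same idea against the azure-storage-blob SDK, with placeholder account URL, credential, container and blob names:

    from azure.storage.blob import BlobServiceClient

    # Placeholder endpoint and credential; substitute real values.
    service = BlobServiceClient(account_url="https://myaccount.blob.core.windows.net",
                                credential="<account-key-or-sas>")

    # get_blob_client only constructs a client locally; no request is made.
    # Its .url points at the individual blob, e.g.
    # https://myaccount.blob.core.windows.net/mycontainer/some_dir/file_1
    blob_client = service.get_blob_client(container="mycontainer", blob="some_dir/file_1")
    print(blob_client.url)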