Not able to upload bigger files into azure blob container
manish-rocks opened this issue · 1 comments
manish-rocks commented
Which service(blob, file, queue) does this issue concern?
Note: for package version >= 12.0.0 please post the issue here instead: https://github.com/Azure/azure-sdk-for-python/issues
for table service, please post the issue here instead: https://github.com/Azure/azure-cosmosdb-python.
Which version of the SDK was used? Please provide the output of pip freeze
.
azure-storage-blob==12.11.0
What problem was encountered?
error : azure.core.exceptions.ServiceResponseError: ('Connection aborted.', timeout('The write operation timed out'))
Have you found a mitigation/solution?
No. For small lists the function below works well, but when the list is larger (around 3000 dictionaries) the code raises the error mentioned above. Please help me figure out how to solve this problem.
`# to upload json files in azure blob containers
def file_upload_blob(sitename, property_list, location="", containername=os.getenv('containerName')):
    """
    Serialize property_list to JSON and upload it to an Azure blob container.

    sitename: string; used both as the blob "folder" prefix and in the file name.
    property_list: list of dictionaries to serialize; a falsy/empty value uploads
        a placeholder payload (['got empty file']) instead.
    location: optional string inserted into the generated blob name.
    containername: target container; NOTE the default is read from the
        containerName environment variable once, at import time — TODO confirm
        that is intended.

    All errors are caught and logged; the function never raises.
    """
    try:
        # Connect to the container client.
        # connection_timeout raises the per-socket-operation timeout so that
        # large payloads do not abort with "The write operation timed out";
        # max_single_put_size forces payloads above 4 MiB to be uploaded as
        # staged blocks (many smaller writes) instead of one huge single PUT.
        container_client = ContainerClient.from_connection_string(
            conn_str=os.getenv("BLOB_STORAGE_CONNECTION_STRING"),
            container_name=containername,
            connection_timeout=600,
            max_single_put_size=4 * 1024 * 1024,
        )
        # (the previous time.sleep(3) was removed: the client is usable as soon
        # as it is constructed; no connection is opened until the first request)
        logger.info(f"connected to the container {containername}")
        # Blob name: <sitename>/<sitename>_<location><timestamp>.json
        filename = os.path.join(
            sitename,
            "{sn}_{location}{ts}.json".format(
                ts=datetime.datetime.now().strftime(r'%Y-%b-%d_%H-%M-%S'),
                sn=sitename,
                location=location,
            ),
        )
        try:
            if property_list:
                data = json.dumps(obj=property_list, indent=4)
            else:
                logger.info('found empty file! .. uploading empty file')
                data = json.dumps(obj=['got empty file'], indent=4)
            container_client.upload_blob(
                name=filename,
                data=data,
                blob_type='BlockBlob',
                # server-side operation timeout (seconds); large multi-MB
                # uploads need far more than the service default
                timeout=3600,
            )
            # include the actual blob name in the log (was a garbled literal)
            logger.info(f'file "{filename}" has been uploaded in azure blob storage')
        except Exception as e:
            logger.error(f"getting error while uploading file to blob storage:{e}")
            traceback.print_exc()
    except Exception as e1:
        logger.error(f" blob storage error: {e1}")
`
ljluestc commented
from azure.storage.blob import ContainerClient
import os
import json
import datetime
import time
def file_upload_blob(sitename, property_list, location="", containername=os.getenv('containerName')):
    """
    Upload property_list as a JSON blob named <sitename>/<sitename>_<location><timestamp>.json.

    Falsy property_list uploads a placeholder payload instead. Errors are
    logged and swallowed; the function never raises.
    """
    try:
        # Open a client for the target container.
        client = ContainerClient.from_connection_string(
            conn_str=os.getenv("BLOB_STORAGE_CONNECTION_STRING"),
            container_name=containername
        )
        time.sleep(3)
        logger.info(f"connected to the container {containername}")
        # Build the blob path from site name, location and a timestamp.
        stamp = datetime.datetime.now().strftime(r'%Y-%b-%d_%H-%M-%S')
        blob_name = os.path.join(sitename, f"{sitename}_{location}{stamp}.json")
        try:
            # Pick payload and success message up front; the upload call is shared.
            if property_list:
                body = json.dumps(obj=property_list, indent=4)
                done_msg = f'file "(unknown)" has been uploaded in azure blob storage'
            else:
                body = json.dumps(obj=['got empty file'], indent=4)
                done_msg = 'found empty file! .. uploading empty file'
            client.upload_blob(
                name=blob_name,
                data=body,
                blob_type='BlockBlob',
                timeout=3600  # 1-hour server-side timeout so large uploads can finish
            )
            logger.info(done_msg)
        except Exception as upload_err:
            logger.error(f"getting error while uploading file to blob storage:{upload_err}")
            traceback.print_exc()
    except Exception as conn_err:
        logger.error(f" blob storage error: {conn_err}")
# Call the function with appropriate parameters
# file_upload_blob(sitename, property_list, location, containername)
The `timeout` parameter in the `upload_blob` call has been set to 3600 seconds (1 hour) to allow more time for the upload operation to complete.