Google Cloud Storage 308 error while writing a file
Hi All,
I am trying to pull a report from the DoubleClick Search API and write it to Google Cloud Storage using Google App Engine (Python).
The following code pulls the report and places it in Google Cloud Storage.
If I hard-code the report_run_id in ds_check_status_report, the file is written correctly, but when I pull the report_run_id from BigQuery and pass it in, a 308 error is thrown.
```python
import json
import sys

from google.appengine.ext import webapp
import cloudstorage as _gcs

# appsettings, dsbqfuns, dsutils, create_credentials and get_service below
# are project-specific helpers.

class ds_check_status_report(webapp.RequestHandler):
    _apptitle = None
    _projectid = None
    _projectnumber = None

    def get(self):
        cfg = appsettings()
        ## report_run_id="kdajdsjflsjfl"
        # Pull the latest report_run_id from BigQuery and start polling.
        result = dsbqfuns._dsbqquery()
        result1 = json.dumps(result['stacktrace'])
        res = json.loads(result1)
        self.response.write(res['rows'][0]['f'][0]['v'])
        dsutils.poll_report(res['rows'][0]['f'][0]['v'])
```
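The value handed to dsutils.poll_report is taken straight from the BigQuery JSON response (`rows[0]['f'][0]['v']`). A minimal sketch of extracting and normalising that value first (the `.strip()` call and the logging line are illustrative additions, not part of the handler above; dsbqfuns._dsbqquery and dsutils.poll_report are the same project-specific helpers):

```python
import json
import logging

result = dsbqfuns._dsbqquery()
res = json.loads(json.dumps(result['stacktrace']))

# BigQuery's REST API returns query results in the shape
# {"rows": [{"f": [{"v": "<cell value>"}, ...]}, ...]}; strip stray
# whitespace and log the repr so a malformed report ID is easy to spot.
report_run_id = res['rows'][0]['f'][0]['v'].strip()
logging.info('report_run_id from BigQuery: %r', report_run_id)
dsutils.poll_report(report_run_id)
```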
```python
def poll_report(report_id):
    """Poll the API with the reportId until the report is ready, up to ten times.

    Args:
      report_id: The ID DS has assigned to a report.
    """
    print "Enter into poll_report"
    cfg = appsettings()
    creds = create_credentials(cfg._client_id, cfg._client_secret, cfg._refresh_token)
    service = get_service(creds)
    try:
        request = service.reports().get(reportId=report_id)
        json_data = request.execute()
        if json_data['isReportReady']:
            print('The report is ready.')
            # For large reports, DS automatically fragments the report into
            # multiple files. The 'files' property in the JSON object that DS
            # returns contains the list of URLs for the file fragments. To
            # download a report, DS needs to know the report ID and the index
            # of a file fragment.
            for i in range(len(json_data['files'])):
                print('Downloading fragment ' + str(i) + ' for report ' + report_id)
                download_files(service, report_id, str(i))  # See Download the report.
        else:
            print('Report is not ready. I will try again.')
    except:
        # Catch-all so the handler keeps running; log the exception details.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        print exc_type, exc_value, exc_traceback
```
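The docstring mentions polling up to ten times, but poll_report itself checks only once per call. A minimal sketch of a wrapper that adds the retry loop (wait_for_report, the ten attempts and the 30-second delay are illustrative names and values, not part of the project):

```python
import time

def wait_for_report(service, report_id, attempts=10, delay_seconds=30):
    """Hypothetical helper: return the report metadata once DS marks it ready.

    On App Engine the request deadline limits how long a single request can
    keep sleeping, so a task queue or cron retry may be preferable in practice.
    """
    for attempt in range(attempts):
        json_data = service.reports().get(reportId=report_id).execute()
        if json_data.get('isReportReady'):
            return json_data
        print('Report not ready yet (attempt %d), waiting...' % (attempt + 1))
        time.sleep(delay_seconds)
    return None
```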
```python
def download_files(service, report_id, report_fragment):
    """Download one report fragment and write it to Google Cloud Storage.

    Args:
      service: An authorized DoubleClick Search service.
      report_id: The ID DS has assigned to a report.
      report_fragment: The 0-based index of the file fragment from the files array.
    """
    print "Enter into download_files", report_id
    filename = "/awstogcs/DoubleClickSearch_Campaign" + report_id + "_" + report_fragment + ".csv"
    write_retry_params = _gcs.RetryParams(backoff_factor=1.1)
    gcs_file = _gcs.open(filename, 'w', content_type='text/plain', retry_params=write_retry_params)
    request = service.reports().getFile(reportId=report_id, reportFragment=report_fragment)
    gcs_file.write(request.execute())
    gcs_file.close()
```
Error Message:
```
INFO 2017-06-26 13:23:37,417 module.py:809] default: "PUT /_ah/gcs/awstogcs/DoubleClickSearch_CampaignAAAndQT-U9IDyFYX_0.csv?upload_id=encoded_gs_file%3AYXdzdG9nY3MvRG91YmxlQ2xpY2tTZWFyY2hfQ2FtcGFpZ25BQUFuZFFULVU5SUR5RllYXzAuY3N2 HTTP/1.1" 308 -
INFO 2017-06-26 13:23:37,710 module.py:809] default: "PUT /_ah/gcs/awstogcs/DoubleClickSearch_CampaignAAAndQT-U9IDyFYX_0.csv?upload_id=encoded_gs_file%3AYXdzdG9nY3MvRG91YmxlQ2xpY2tTZWFyY2hfQ2FtcGFpZ25BQUFuZFFULVU5SUR5RllYXzAuY3N2 HTTP/1.1" 308 -
INFO 2017-06-26 13:23:37,976 module.py:809] default: "PUT /_ah/gcs/awstogcs/DoubleClickSearch_CampaignAAAndQT-U9IDyFYX_0.csv?upload_id=encoded_gs_file%3AYXdzdG9nY3MvRG91YmxlQ2xpY2tTZWFyY2hfQ2FtcGFpZ25BQUFuZFFULVU5SUR5RllYXzAuY3N2 HTTP/1.1" 308 -
INFO 2017-06-26 13:23:38,157 module.py:809] default: "PUT /_ah/gcs/awstogcs/DoubleClickSearch_CampaignAAAndQT-U9IDyFYX_0.csv?upload_id=encoded_gs_file%3AYXdzdG9nY3MvRG91YmxlQ2xpY2tTZWFyY2hfQ2FtcGFpZ25BQUFuZFFULVU5SUR5RllYXzAuY3N2 HTTP/1.1" 308 -
INFO 2017-06-26 13:23:38,440 module.py:809] default: "PUT /_ah/gcs/awstogcs/DoubleClickSearch_Campa
```
A 308 implies the upload isn't complete. The details of how much data has been committed are in the reply headers. The client will repeatedly upload chunks until the whole file has been uploaded. The line numbers don't seem to match up, so I'm not sure what would "throw" a 308 error, as this status is traditionally managed internally by the GCS client.
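To make that handshake concrete: the client PUTs each chunk with a Content-Range header, and a 308 reply carries a Range header telling it how many bytes the server has committed so far, so it knows where to resume. A minimal sketch of asking a resumable-upload session for that status directly (upload_url is assumed to be an existing session URI; the requests library is used here just for illustration):

```python
import requests

def committed_bytes(upload_url):
    """Return how many bytes a resumable-upload session has committed so far.

    A PUT with an empty body and 'Content-Range: bytes */*' is a status
    query; a 308 response means the upload is still incomplete, and its
    Range header (e.g. 'bytes=0-262143') gives the committed byte count.
    """
    resp = requests.put(upload_url, headers={'Content-Range': 'bytes */*'})
    if resp.status_code == 308:
        rng = resp.headers.get('Range', '')
        return int(rng.rsplit('-', 1)[-1]) + 1 if rng else 0
    # A 200 or 201 means the upload has already completed.
    return None
```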