本文整理汇总了Python中apiclient.http.MediaFileUpload.mimetype方法的典型用法代码示例。如果您正苦于以下问题:Python MediaFileUpload.mimetype方法的具体用法?Python MediaFileUpload.mimetype怎么用?Python MediaFileUpload.mimetype使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类apiclient.http.MediaFileUpload
的用法示例。
在下文中一共展示了MediaFileUpload.mimetype方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: cloudstorage_upload
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def cloudstorage_upload(service, project_id, bucket, source_file, dest_file, show_status_messages=True):
    """Upload a local file to a Cloud Storage bucket.

    Args:
        service: Authenticated API service object, e.g. service = build('bigquery', 'v2', http=http)
        project_id: string, name of the Google project to upload to
        bucket: string, Cloud Storage bucket name (without the "gs://" prefix)
        source_file: string, path to the local file to upload
        dest_file: string, name to give the file on Cloud Storage
        show_status_messages: bool, print progress messages when True

    Returns:
        Response of the upload in a JSON format
    """
    # Adapted from a combination of these sources:
    # https://code.google.com/p/google-cloud-platform-samples/source/browse/file-transfer-json/chunked_transfer.py?repo=storage
    # https://developers.google.com/api-client-library/python/guide/media_upload
    assert bucket and dest_file
    if show_status_messages:
        print('Upload request for {0}'.format(source_file))

    media = MediaFileUpload(source_file, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        # Mime type could not be guessed from the filename; retry with the default.
        media = MediaFileUpload(source_file, DEFAULT_MIMETYPE, resumable=True)

    insert_request = service.objects().insert(bucket=bucket,
                                              name=dest_file,
                                              media_body=media)
    response = insert_request.execute()

    if show_status_messages:
        print('Upload complete')
    return response
示例2: upload_file
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_file(self, file, id, resource):
    """Upload a local file to an asset via a resumable media upload.

    Args:
        file: path of the local file to upload.
        id: id of the asset to attach the file to.
        resource: API resource exposing files().insert(...).

    Raises:
        Exception: the file's mime type could not be determined.
        HttpError: a non-retryable HTTP error occurred (status < 500, != 410).
    """
    # Retry transport and file IO errors.
    RETRYABLE_ERRORS = (httplib2.HttpLib2Error, IOError)
    # Fixed: the original had a duplicated assignment
    # (`chunk_size = chunk_size = getattr(...)`).
    chunk_size = getattr(self, 'chunk_size', -1)
    self.log("Uploading file '%s'" % (file))
    media = MediaFileUpload(file, chunksize=chunk_size, resumable=True)
    if not media.mimetype():
        raise Exception("Could not determine mime-type. Please make lib mimetypes aware of it.")
    request = resource.files().insert(id=id, filename=os.path.basename(file), media_body=media)
    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            start_time_chunk = time.time()
            progress, response = request.next_chunk()
            if progress:
                # Rough throughput estimate for the chunk just sent.
                Mbps = ((chunk_size / (time.time() - start_time_chunk)) * 0.008 * 0.001)
                print("%s%% (%s/Mbps)" % (round(progress.progress() * 100), round(Mbps, 2)))
        except HttpError as err:
            # Contray to the documentation GME does't return 201/200 for the last chunk
            if err.resp.status == 204:
                response = ""
            else:
                error = err
                if err.resp.status < 500 and err.resp.status != 410:
                    raise
        except RETRYABLE_ERRORS as err:
            error = err
示例3: put
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def put(service):
    """Prompt the user for a local file name and upload it to the bucket.

    Args:
        service: Authenticated Cloud Storage service object.
    """
    # User inputs the file name that needs to be uploaded.
    # NOTE(review): a stale comment here claimed the file is AES-encrypted
    # before upload, but no encryption is performed in this function.
    fileName = input('Enter file name to be uploaded to Cloud:\n')
    if not fileName or not os.path.isfile(fileName):
        print('Invalid file name or file not found. Terminating!')
        return
    directory, f_name = os.path.split(fileName)
    # Upload the file to the bucket.
    try:
        media = MediaFileUpload(fileName, chunksize=_CHUNK_SIZE, resumable=True)
        if not media.mimetype():
            # Mime type could not be guessed; retry with the module default.
            media = MediaFileUpload(fileName, _DEFAULT_MIMETYPE, resumable=True)
        request = service.objects().insert(bucket=_BUCKET_NAME, name=f_name,
                                           media_body=media)
        response = None
        start = datetime.datetime.now()
        while response is None:
            status, response = request.next_chunk()
            if status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        print("Upload Complete!")
        end = datetime.datetime.now()
        duration = end - start
        print('Upload took {} seconds'.format(duration.seconds))
        # Removes references to the uploaded file
        media = request = None
    except client.AccessTokenRefreshError:
        print("Error in the credentials")
示例4: upload
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload(self, filename, bucket_name, object_name):
    """Upload a local file to a Cloud Storage object, printing progress.

    Args:
        filename: path of the local file to upload.
        bucket_name: name of the destination bucket.
        object_name: name to give the object in the bucket.

    Raises:
        HttpError: a non-retryable (status < 500) upload error occurred.
    """
    assert bucket_name and object_name
    print('Building upload request...')
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        # Mime type could not be guessed; retry with the default.
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = self.cloud.objects().insert(bucket=bucket_name,
                                          name=object_name,
                                          media_body=media)
    print('Uploading file: %s to bucket: %s object: %s ' % (filename,
                                                            bucket_name,
                                                            object_name))
    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                self.print_with_carriage_return(
                    'Upload %d%%' % (100 * progress.progress()))
        except HttpError as err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS as err:
            error = err
示例5: upload
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload(filename, bucketName, clientEmail, keyFile, **kwargs):
    """Build a storage service from service-account credentials and upload a file.

    Args:
        filename: local path; also used as the object name in the bucket.
        bucketName: name of the destination Cloud Storage bucket.
        clientEmail: service-account email used for authentication.
        keyFile: path of the service-account key file.
        **kwargs: extra arguments forwarded to the service builder.

    Raises:
        HttpError: a non-retryable (status < 500) upload error occurred.
    """
    service = Google_Service_Builder.buildService(clientEmail, keyFile,
                                                  domain="devstorage.read_write",
                                                  service="storage",
                                                  version="v1", **kwargs)
    print('Building upload request...')
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        # Mime type could not be guessed; retry with the default.
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucketName, name=filename,
                                       media_body=media)
    print('Uploading file: %s to: %s/%s' % (filename, bucketName, filename))
    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                print('Upload progress: %.2f%%' % (100.0 * progress.progress()))
        except HttpError as err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS as err:
            error = err
示例6: upload_file_init
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_file_init(ctx, asset_id, asset_type, filepath):
    """Upload the first 256KB of a given file to an asset.

    This forces it into an "uploading" state which prevents processing from
    occurring until all files are uploaded. Built as an experiment and
    abandoned in favour of multithreaded uploading.

    Parameters
    ----------
    ctx : Context
        A Click Context object.
    asset_id : str
        The Id of a valid raster or vector asset.
    asset_type : str
        The type of asset being represented. Possible values: table, raster
    filepath : str
        The absolute path to the file.
    """
    @retries(1000)
    def attempt_chunk(ctx, request):
        return request.next_chunk()

    chunk_size = 262144  # 256KB - smallest possible chunk size for resumable upload
    media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
    if not media.mimetype():
        # No guessable mime type; fall back to a generic binary one.
        media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)

    if asset_type == "vector":
        resource = ctx.service().tables()
    else:
        resource = ctx.service().rasters()

    basename = os.path.basename(filepath)
    request = resource.files().insert(id=asset_id, filename=basename, media_body=media)
    try:
        attempt_chunk(ctx, request)
    except NoContent:
        pass
    ctx.log("Init uploading %s" % (basename))
示例7: upload
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload(argv):
    """Upload a local file (argv[1]) to a gs://bucket/object target (argv[2]).

    Args:
        argv: command-line arguments; argv[1] is the local file path and
            argv[2] is the destination in "gs://bucket/object" form.

    Raises:
        HttpError: a non-retryable (status < 500) upload error occurred.
    """
    filename = argv[1]
    # Strip the leading "gs://" and split into bucket and object parts.
    bucket_name, object_name = argv[2][5:].split('/', 1)
    assert bucket_name and object_name
    service = get_authenticated_service(RW_SCOPE)
    print('Building upload request...')
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        # Mime type could not be guessed; retry with the default.
        media = MediaFileUpload(filename, DEFAULT_MIMETYPE, resumable=True)
    request = service.objects().insert(bucket=bucket_name, name=object_name,
                                       media_body=media)
    print('Uploading file: %s to bucket: %s object: %s ' % (filename, bucket_name,
                                                            object_name))
    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                print_with_carriage_return('Upload %d%%' % (100 * progress.progress()))
        except HttpError as err:
            error = err
            if err.resp.status < 500:
                raise
        except RETRYABLE_ERRORS as err:
            error = err
示例8: upload
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload(self, filename, gdrivename=None, parent_folder="root"):
    """Upload a local file to Google Drive.

    Args:
        filename: path of the local file to upload.
        gdrivename: name to give the file on GDrive; defaults to the
            local file's basename when None or empty.
        parent_folder: id of the GDrive folder to upload into.

    Returns:
        The id of the newly created GDrive file.

    Raises:
        FileExistsError: a file with that name already exists in parent_folder.
        ConnectionError: the upload kept failing after all retries.
    """
    logging.debug(
        "Going to upload file to GDrive. filename=%s , gdrivename=%s , parent_folder=%s"
        % (filename, gdrivename, parent_folder)
    )
    # Convert the name of the file on GDrive in case it is not provided
    if gdrivename is None or gdrivename == "":
        gdrivename = filename.split("/")[-1]
    # Check whether the file does not already exist. get_id() raising means
    # "not found", which is the good case here.
    try:
        self.get_id(gdrivename, parent_folder)
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate.
        pass
    else:
        logging.error("The file to upload %s already exists" % gdrivename)
        raise FileExistsError(gdrivename)
    # Prepare for the file upload
    logging.debug("Creating the media object for uploading from %s" % filename)
    media = MediaFileUpload(filename, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        logging.debug("MIME type of the file has not been recognized, using the default %s" % DEFAULT_MIMETYPE)
        # Fixed: MediaFileUpload's keyword argument is `mimetype`; the original
        # `mimeType=` would have raised a TypeError on this path.
        media = MediaFileUpload(filename, mimetype=DEFAULT_MIMETYPE, chunksize=CHUNKSIZE, resumable=True)
    body = {
        "name": gdrivename,
        "parents": [parent_folder],
    }
    logging.debug("Starting upload of the %s file as %s" % (filename, gdrivename))
    request = self.service.files().create(body=body, media_body=media, fields="id")
    retry = 5
    while retry > 0:
        try:
            response = None
            while response is None:
                status, response = request.next_chunk()
                if status:
                    logging.info("Uploaded %d%%." % int(status.progress() * 100))
            logging.info("Upload has been completed")
            # No need for a retry
            retry = -1
        except apiclient.errors.HttpError as e:
            if e.resp.status in [404]:
                # Start the upload all over again.
                request = self.service.files().create(body=body, media_body=media, fields="id")
            elif e.resp.status in [500, 502, 503, 504]:
                # Call next_chunk() again, but use an exponential backoff for repeated errors.
                logging.warning("Upload of a chunk has failed, retrying ...")
                retry -= 1
                time.sleep(3)
            else:
                # Do not retry. Log the error and fail.
                logging.error("The upload has failed: %s" % str(e))
                raise
    if retry == 0:
        logging.error("The upload has failed.")
        raise ConnectionError
    fid = response.get("id")
    self.cache[fid] = (gdrivename, parent_folder)
    return fid
示例9: update
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def update(self, new_path=None, parent_id='root'):
    """Update this file's content and metadata on Drive.

    Falls back to create() when the file has never been uploaded.

    Args:
        new_path: optional new local path to take the content/name from.
        parent_id: id of the Drive folder that should own the file.

    Returns:
        The updated metadata dict, or None on error or empty file.
    """
    try:
        if not hasattr(self, 'id'):
            # Never uploaded before; create instead of update.
            return self.create(parent_id)
        existing_file = self.get_file(self.id)
        if new_path is not None:
            self.path = new_path
        mime_type = defaul_mime_type  # module-level default (spelled this way at its definition)
        media_body = None
        if not os.path.isdir(self.path):
            media_body = MediaFileUpload(self.path, resumable=True)
            if media_body.size() == 0:
                logger.error('cannot update no content file %s', self.path)
                return None
            if media_body.mimetype() is not None:
                mime_type = media_body.mimetype()
            else:
                # Force the default mime type onto the upload object.
                media_body._mimetype = mime_type
        else:
            mime_type = folder_mime_type
        existing_file['title'] = os.path.basename(self.path)
        existing_file['parents'] = [{'id': parent_id}]
        existing_file['mimeType'] = mime_type
        logger.info('updated %s', self.path)
        with drive.lock:
            metadata = drive.service.files().update(
                fileId=self.id,
                body=existing_file,
                media_body=media_body).execute()
        self.id = metadata['id']
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if 'downloadUrl' in metadata:
            self.download_url = metadata['downloadUrl']
        if 'md5Checksum' in metadata:
            self.md5Checksum = metadata['md5Checksum']
        return metadata
    except errors.HttpError as error:
        logger.error('an error occurred: %s', error)
        return None
示例10: upload_zip_to_gcs
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_zip_to_gcs(server_key, archive_file, backup=False):
    """Upload a server archive zip to Google Cloud Storage, then delete it locally.

    Args:
        server_key: key identifying the server whose archive is uploaded.
        archive_file: path of the local zip file; removed after a successful upload.
        backup: when False, progress events are posted to the controller.

    Raises:
        HttpError: a non-retryable error, or retryable errors beyond NUM_RETRIES.
    """
    name = get_gcs_archive_name(server_key)
    credentials = gce.AppAssertionCredentials(scope=STORAGE_API_SCOPE)
    http = credentials.authorize(httplib2.Http())
    service = build('storage', STORAGE_API_VERSION, http=http)
    retry = True
    while retry:
        media = MediaFileUpload(archive_file, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(archive_file, 'application/zip', resumable=True)
        request = service.objects().insert(bucket=app_bucket, name=name, media_body=media)
        progress = previous_progress = None
        tries = 0
        response = None
        while response is None:
            try:
                status, response = request.next_chunk()
                tries = 0
                progress = int(status.progress() * 100) if status is not None else 0
                if response is not None:  # Done
                    retry = False
                    progress = 100
                if progress != previous_progress:
                    if progress % 10 == 0:
                        logger.info("Server {0} archive is {1}% uploaded".format(server_key, progress))
                    if not backup:
                        try:
                            client.post_event(server_key, STOP_EVENT, progress)
                        except Exception as e:
                            logger.exception(
                                "Error sending controller save event for server [{0}]: {1}".format(
                                    server_key, e
                                )
                            )
                    previous_progress = progress
            except HttpError as e:
                if e.resp.status in [404]:
                    # Fixed: a 404 invalidates the resumable session, so break out
                    # and let the outer loop rebuild the media object and request
                    # (the original kept polling the dead request forever).
                    # Also use the module `logger` consistently (was `logging.error`).
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Retrying....".format(
                            str(e), server_key
                        )
                    )
                    break
                elif e.resp.status in [500, 502, 503, 504]:  # Retry with backoff
                    tries += 1
                    if tries > NUM_RETRIES:
                        raise
                    sleeptime = 2**min(tries, 4)
                    logger.error(
                        "Error ({0}) uploading archive for server {1}. Sleeping {2} seconds.".format(
                            str(e), server_key, sleeptime
                        )
                    )
                    time.sleep(sleeptime)
                else:
                    raise
    os.remove(archive_file)
示例11: create
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def create(self, parent_id='root'):
    """Create this file (or folder) on Drive under parent_id.

    Args:
        parent_id: id of the Drive folder that should own the new file.

    Returns:
        The inserted file's metadata dict, or None on error or empty file.
    """
    mime_type = defaul_mime_type  # module-level default (spelled this way at its definition)
    media_body = None
    if not os.path.isdir(self.path):
        media_body = MediaFileUpload(self.path, resumable=True)
        if media_body.size() == 0:
            logger.error('cannot create no content file %s', self.path)
            return None
        if media_body.mimetype() is not None:
            mime_type = media_body.mimetype()
        else:
            # Force the default mime type onto the upload object.
            media_body._mimetype = mime_type
    else:
        mime_type = folder_mime_type
    body = {
        'title': os.path.basename(self.path),
        'mimeType': mime_type,
        'parents': [{'id': parent_id}]
    }
    try:
        with drive.lock:
            metadata = drive.service.files().insert(
                body=body,
                media_body=media_body).execute()
        logger.info('created %s, %s', self.path, body['mimeType'])
        self.id = metadata['id']
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if 'downloadUrl' in metadata:
            self.download_url = metadata['downloadUrl']
        if 'md5Checksum' in metadata:
            self.md5Checksum = metadata['md5Checksum']
        return metadata
    except errors.HttpError as error:
        logger.error('an error occurred: %s', error)
        return None
示例12: upload_file_to_bucket
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_file_to_bucket(self, bucket_name, file_path):
    """Upload a local file to a GCS bucket with resumable upload and retries.

    Args:
        bucket_name: name of the destination bucket (must already exist).
        file_path: path of the local file; its basename becomes the blob name.

    Returns:
        Tuple (True, blob_name) on success.

    Raises:
        Exception: the bucket does not exist.
        errors.HttpError: a non-retryable (status < 500) upload error occurred.
    """
    def handle_progressless_iter(error, progressless_iters):
        # Exponential backoff; give up after NUM_RETRIES progressless iterations.
        if progressless_iters > NUM_RETRIES:
            self.logger.info('Failed to make progress for too many consecutive iterations.')
            raise error
        sleeptime = random.random() * (2 ** progressless_iters)
        self.logger.info(
            'Caught exception ({}). Sleeping for {} seconds before retry #{}.'.format(
                str(error), sleeptime, progressless_iters))
        time.sleep(sleeptime)

    self.logger.info('Building upload request...')
    media = MediaFileUpload(file_path, chunksize=CHUNKSIZE, resumable=True)
    if not media.mimetype():
        media = MediaFileUpload(file_path, DEFAULT_MIMETYPE, resumable=True)
    blob_name = os.path.basename(file_path)
    if not self.bucket_exists(bucket_name):
        self.logger.error("Bucket {} doesn't exists".format(bucket_name))
        # Fixed: `raise "..."` (a string) is a TypeError in Python 3;
        # raise a real exception instead.
        raise Exception("Bucket doesn't exist")
    request = self._storage.objects().insert(
        bucket=bucket_name, name=blob_name, media_body=media)
    self.logger.info('Uploading file: {}, to bucket: {}, blob: {}'.format(
        file_path, bucket_name, blob_name))
    progressless_iters = 0
    response = None
    while response is None:
        error = None
        try:
            progress, response = request.next_chunk()
            if progress:
                self.logger.info('Upload {}%'.format(100 * progress.progress()))
        except errors.HttpError as err:
            # `except ... as` unbinds its target when the handler exits in
            # Python 3, so stash the exception in `error` for the bookkeeping
            # below instead of binding it directly to `error`.
            if err.resp.status < 500:
                raise
            error = err
        except RETRYABLE_ERRORS as err:
            error = err
        if error:
            progressless_iters += 1
            handle_progressless_iter(error, progressless_iters)
        else:
            progressless_iters = 0
    self.logger.info('Upload complete!')
    self.logger.info('Uploaded Object:')
    self.logger.info(json_dumps(response, indent=2))
    return (True, blob_name)
示例13: upload_creative_asset
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_creative_asset(service, profile_id, advertiser_id, asset_name, path_to_asset_file, asset_type):
    """Uploads a creative asset and returns an assetIdentifier."""
    # Creative asset metadata sent alongside the media body.
    creative_asset = {"assetIdentifier": {"name": asset_name, "type": asset_type}}

    media = MediaFileUpload(path_to_asset_file)
    if not media.mimetype():
        # Unknown file type: fall back to a generic binary mime type.
        media = MediaFileUpload(path_to_asset_file, "application/octet-stream")

    insert_request = service.creativeAssets().insert(
        advertiserId=advertiser_id,
        profileId=profile_id,
        media_body=media,
        body=creative_asset,
    )
    response = insert_request.execute()
    return response["assetIdentifier"]
开发者ID:jleegainey,项目名称:googleads-dfa-reporting-samples,代码行数:18,代码来源:create_enhanced_image_creative.py
示例14: test_media_file_upload_to_from_json
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def test_media_file_upload_to_from_json(self):
    """Round-trip a MediaFileUpload through to_json()/new_from_json() and
    check that the restored object preserves every property."""
    def check(upload):
        self.assertEqual("image/png", upload.mimetype())
        self.assertEqual(190, upload.size())
        self.assertEqual(True, upload.resumable())
        self.assertEqual(500, upload.chunksize())
        self.assertEqual("PNG", upload.getbytes(1, 3))

    original = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
    check(original)
    serialized = original.to_json()
    restored = MediaUpload.new_from_json(serialized)
    check(restored)
示例15: upload_file_worker
# 需要导入模块: from apiclient.http import MediaFileUpload [as 别名]
# 或者: from apiclient.http.MediaFileUpload import mimetype [as 别名]
def upload_file_worker(ctx, asset_id, asset_type, filepath, chunk_size):
    """Upload a given file to an asset in its own thread as
    part of upload_files_multithreaded().

    Parameters
    ----------
    ctx : Context
        A Click Context object.
    asset_id : str
        The Id of a valid raster or vector asset.
    asset_type : int
        A GME asset type defined by the Asset class.
    filepath : str
        The absolute path to the file.
    chunk_size : int
        The size of each upload chunk (must be a multiple of 256KB).
        Defaults to -1 (native Python streaming)
    """
    # Fixed: this docstring used to sit *after* the print below, which made it
    # a no-op string expression rather than the function's docstring.
    print("upload_file_worker %s" % (filepath))

    @retries(1000)
    def next_chunk(ctx, request):
        return request.next_chunk()

    basename = os.path.basename(filepath)
    ctx.log("Begun uploading %s" % (basename))
    start_time = time.time()
    media = MediaFileUpload(filepath, chunksize=chunk_size, resumable=True)
    if not media.mimetype():
        # No guessable mime type; fall back to a generic binary one.
        media = MediaFileUpload(filepath, mimetype='application/octet-stream', chunksize=chunk_size, resumable=True)
    resource = get_asset_resource(ctx.service(ident=current_process().ident), asset_type)
    request = resource.files().insert(id=asset_id, filename=basename, media_body=media)
    response = None
    while response is None:
        try:
            progress, response = next_chunk(ctx, request)
        except NoContent:
            # Files uploads return a 204 No Content "error" that actually means it's finished successfully.
            response = ""
    ctx.log("Finished uploading %s (%s mins)" % (basename, round((time.time() - start_time) / 60, 2)))