本文整理汇总了Python中tempfile.TemporaryFile.flush方法的典型用法代码示例。如果您正苦于以下问题:Python TemporaryFile.flush方法的具体用法?Python TemporaryFile.flush怎么用?Python TemporaryFile.flush使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tempfile.TemporaryFile
的用法示例。
在下文中一共展示了TemporaryFile.flush方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _create_temp_file
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def _create_temp_file(edid_binary):
edid_file = TemporaryFile()
edid_file.write(edid_binary)
edid_file.flush()
edid_file.seek(0)
return edid_file
示例2: getUpdateElements
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def getUpdateElements(self, valueMap):
    '''
    Build a string of XML ``<tag>value</tag>`` elements for a property update.

    @param valueMap: mapping of property name -> value. Keys may be plain
        strings (qualified with ``self.defaultNameSpace``) or
        ``(namespace, name)`` tuples. Values may be ``qp_xml._element``
        instances (serialized as XML) or plain values (wrapped in CDATA).
    @return: the concatenated XML elements as a single string.
    '''
    elements = ""
    for name in valueMap.keys():
        fullname = name
        # Python 2: types.StringType is str. Plain string keys get the
        # default namespace prepended.
        if isinstance(name, types.StringType):
            fullname = (self.defaultNameSpace, name)
        if not fullname[0]:
            # No namespace: use the bare name as the tag.
            tag = fullname[1]
        else:
            # Map the namespace URI to its registered prefix shortcut.
            tag = self.shortcuts[fullname[0]] + ':' + fullname[1]
        value = valueMap[name]
        if value:
            if isinstance(value, qp_xml._element):
                # Serialize the parsed element through a temp file, then
                # drop the first line (the XML declaration) and keep the rest.
                # NOTE(review): tmpFile is never closed explicitly — relies
                # on garbage collection; also qp_xml.dump's return value is
                # assigned to `value` but immediately overwritten below.
                tmpFile = TemporaryFile('w+')
                value = qp_xml.dump(tmpFile, value)
                tmpFile.flush()
                tmpFile.seek(0)
                tmpFile.readline()
                value = tmpFile.read()
            else:
                # Plain values are embedded verbatim inside a CDATA section.
                value = "<![CDATA[%s]]>" % value
        else:
            # Falsy values produce an empty element.
            value = ""
        elements += "<%s>%s</%s>" % (tag, value, tag)
    return elements
示例3: local_ds_files
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def local_ds_files(ds):
"""
Gets the list of files corresponding to a published dataset
stored on cms_dbs_ph_analysis_02.
Args:
ds: the path to the published dataset, ending in /USER
Returns:
A list of the LFN-s of the dataset.
"""
tf = TemporaryFile()
stdout = sys.stdout
stdout.flush()
sys.stdout = tf
print "Query"
ret = call_das_cli('--query=file dataset=%s instance=cms_dbs_ph_analysis_02' % ds, '--limit=0')
print ret
tf.flush()
tf.seek(0)
sys.stdout = stdout
fl = []
for li in tf.readlines():
if "/store/" in li:
fl.append(li.strip())
tf.close()
return fl
示例4: run_process
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def run_process(cmd, timeout=10):
    """
    Run *cmd* in a subprocess with a timeout, capturing stdout/stderr.

    Args:
        cmd: the command as bytes, str, or a list of argument strings.
        timeout: seconds to wait; a falsy value means fire-and-forget.
    Returns:
        (returncode, stdout_text, stderr_text) on normal completion;
        (None, None, None) when detached (no timeout);
        (-1, None, None) on spawn failure or timeout.
    """
    if isinstance(cmd, bytes):
        cmd = cmd.decode('utf-8')
    if isinstance(cmd, str):
        cmd = cmd.split()
    if not timeout:
        # Fire-and-forget: do not wait for or capture anything.
        subprocess.Popen(cmd)
        return None, None, None
    out = err = None
    try:
        out = TemporaryFile()
        err = TemporaryFile()
        prc = subprocess.Popen(cmd, stdout=out, stderr=err)
    except Exception:
        # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
        # are not swallowed. Close whichever temp files were created.
        LOG.exception('error in run_process %s' % cmd)
        for fobj in (out, err):
            if fobj is not None:
                fobj.close()
        return -1, None, None
    starttime = time.time()
    try:
        while 1:
            if time.time() - starttime > timeout:
                LOG.error('run command %s timeout' % ' '.join(cmd))
                try:
                    kill_prc(prc)
                except Exception:
                    pass
                return -1, None, None
            if not alive(prc):
                # Child exited: flush and rewind captures, return its output.
                out.flush()
                err.flush()
                out.seek(0)
                err.seek(0)
                return prc.poll(), out.read().decode('utf-8'), err.read().decode('utf-8')
            time.sleep(0.1)
    finally:
        # Close the capture files on every exit path (previously leaked
        # on both the timeout and the normal-return paths).
        out.close()
        err.close()
示例5: backup
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def backup(self):
    """
    Archive the configured directory with tar, writing the archive
    through self._open_stream and logging tar's stderr output.

    Raises:
        BackupError: if the configured path is not a directory, or
            tar exits with a non-zero status.
    """
    if self.dry_run:
        return
    if not os.path.exists(self.config['tar']['directory']) \
            or not os.path.isdir(self.config['tar']['directory']):
        raise BackupError('{0} is not a directory!'.format(self.config['tar']['directory']))
    # Derive the archive name from the path, e.g. /var/www -> var_www.tar
    out_name = "{0}.tar".format(
        self.config['tar']['directory'].lstrip('/').replace('/', '_'))
    outfile = os.path.join(self.target_directory, out_name)
    args = ['tar', 'c', self.config['tar']['directory']]
    errlog = TemporaryFile()
    stream = self._open_stream(outfile, 'w')
    LOG.info("Executing: %s", list2cmdline(args))
    try:
        # tar writes the archive straight into the stream's descriptor
        # and its diagnostics into errlog.
        pid = Popen(
            args,
            stdout=stream.fileno(),
            stderr=errlog.fileno(),
            close_fds=True)
        status = pid.wait()
    finally:
        # Close the output stream so any buffered/compressed data is
        # finalized on disk (previously it was never closed explicitly).
        stream.close()
    try:
        errlog.flush()
        errlog.seek(0)
        for line in errlog:
            LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid, line.rstrip())
    finally:
        errlog.close()
    if status != 0:
        raise BackupError('tar failed (status={0})'.format(status))
示例6: post
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def post(self):
    """
    Accept a base64-encoded 'blob' in the JSON body, store it in the
    blob container keyed by its SHA-1 digest, and return digest + url.

    Returns:
        (response_dict, 201) when the blob was newly created,
        (response_dict, 409) when it already existed.
    """
    # parse incoming POST data
    reqparse = RequestParser()
    reqparse.add_argument('blob', type=str, location='json')
    data = reqparse.parse_args()
    if not data.blob:
        return self.argument_required('blob')
    # decode the base64 payload back into raw bytes
    tmp = base64.b64decode(data.blob)
    # content-addressed key: sha1 of the raw bytes
    digest = hashlib.sha1(tmp).hexdigest()
    # spool the payload through a temp file for the container API
    f = TemporaryFile()
    f.write(tmp)
    f.flush()
    f.seek(0)
    try:
        # upload blob
        created = self.blob_container.put(f, digest=digest)
    finally:
        # Close the spool file (previously left to garbage collection).
        f.close()
    # 201 Created vs 409 Conflict (replaces the `and/or` trick; 201 is
    # truthy, so behavior is identical).
    code = 201 if created else 409
    response = dict(
        digest=digest,
        url='/image/{0}'.format(digest)
    )
    return response, code
示例7: backup
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def backup(self):
    """
    Create backup.

    Tars the configured directory through a compression stream opened
    by open_stream(), logging tar's stderr output.

    Raises:
        BackupError: if the configured path is not a directory, or
            tar exits with a non-zero status.
    """
    if self.dry_run:
        return
    if not os.path.exists(self.config["tar"]["directory"]) or not os.path.isdir(
        self.config["tar"]["directory"]
    ):
        raise BackupError("{0} is not a directory!".format(self.config["tar"]["directory"]))
    # Derive the archive name from the path, e.g. /var/www -> var_www.tar
    out_name = "{0}.tar".format(self.config["tar"]["directory"].lstrip("/").replace("/", "_"))
    outfile = os.path.join(self.target_directory, out_name)
    args = ["tar", "c", self.config["tar"]["directory"]]
    errlog = TemporaryFile()
    stream = open_stream(outfile, "w", **self.config["compression"])
    LOG.info("Executing: %s", list2cmdline(args))
    try:
        # tar writes the archive into the stream's descriptor and its
        # diagnostics into errlog.
        pid = Popen(args, stdout=stream.fileno(), stderr=errlog.fileno(), close_fds=True)
        status = pid.wait()
    finally:
        # Close the compression stream so buffered output is flushed and
        # the archive is finalized (previously never closed explicitly).
        stream.close()
    try:
        errlog.flush()
        errlog.seek(0)
        for line in errlog:
            LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid, line.rstrip())
    finally:
        errlog.close()
    if status != 0:
        raise BackupError("tar failed (status={0})".format(status))
示例8: to_xml
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def to_xml(self, f=None):
    """Get this domain as an XML DOM Document

    Serializes the domain's items and attribute values as XML, each
    value wrapped in a CDATA section and encoded as UTF-8.

    :param f: Optional File to dump directly to
    :type f: File or Stream
    :return: File object where the XML has been dumped to
    :rtype: file
    """
    if not f:
        # No target supplied: spool the XML into an anonymous temp file.
        from tempfile import TemporaryFile
        f = TemporaryFile()
    print('<?xml version="1.0" encoding="UTF-8"?>', file=f)
    print('<Domain id="%s">' % self.name, file=f)
    for item in self:
        print('\t<Item id="%s">' % item.name, file=f)
        for k in item:
            print('\t\t<attribute id="%s">' % k, file=f)
            values = item[k]
            # Normalize single values to a one-element list.
            if not isinstance(values, list):
                values = [values]
            for value in values:
                print('\t\t\t<value><![CDATA[', end=' ', file=f)
                # Python 2: coerce both unicode and byte strings to
                # UTF-8 bytes, replacing undecodable characters.
                if isinstance(value, unicode):
                    value = value.encode('utf-8', 'replace')
                else:
                    value = unicode(value, errors='replace').encode('utf-8', 'replace')
                f.write(value)
                print(']]></value>', file=f)
            print('\t\t</attribute>', file=f)
        print('\t</Item>', file=f)
    print('</Domain>', file=f)
    # Flush and rewind so the caller can read the document from the start.
    f.flush()
    f.seek(0)
    return f
示例9: backup
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def backup(self, config, **flags):
    """
    Tar the project's databases (as described by an INI config file)
    into an in-memory spool and optionally upload the archive.

    :param config: path to the INI config file (must have a [general]
        section with `project` and whitespace-separated `databases`).
    :param flags: `compress` selects a tarfile compression suffix
        (e.g. "gz"); `upload` triggers the client upload.
    """
    # check if this is a config file
    config_f = open(config, "rb")
    # tarfile mode: "w:<comp>" when compressing, plain "w" otherwise.
    mode = ("w:%s" % flags['compress']) if flags.get('compress') else "w"
    buff = TemporaryFile()
    with config_f:
        cfg = ConfigParser()
        cfg.readfp(config_f)
        project = cfg.get("general", "project")
        databases = cfg.get("general", "databases").split()
    tarname = "%s.tar" % project
    tar = tarfile.open(fileobj=buff, mode=mode, name=tarname)
    # Helper returns the per-database file objects it opened so they
    # can be closed after the tar members are written.
    to_close = self.__add_database_to_tar(tar, cfg, databases)
    tar.close()
    for f in to_close:
        f.close()
    # Rewind so the upload (if any) reads the archive from the start.
    buff.seek(0)
    name = project + ".tar"
    if flags.get('compress'):
        # e.g. "tgz" / "tbz2" style suffix derived from the compressor.
        name = project + ".t%s" % flags['compress']
    if flags.get("upload"):
        # NOTE(review): flush() after seek(0) is harmless (flush does not
        # move the file position) but only happens on the upload path.
        buff.flush()
        timestamp = datetime.now().isoformat()
        self.client.backup(project, name, buff, timestamp)
    buff.close()
示例10: create_tarball
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def create_tarball(tar_paths):
    """
    Context manager generator that creates the tarball of the Docker
    context to use for building the image.

    NOTE(review): this is a generator meant to be used as a context
    manager; the @contextmanager decorator is not visible in this
    chunk — confirm it is applied at the definition site.

    Parameters
    ----------
    tar_paths dict(str, str)
        Key representing a full path to the file or directory and the
        Value representing the path within the tarball

    Yields
    ------
    The tarball file
    """
    tarballfile = TemporaryFile()
    try:
        with tarfile.open(fileobj=tarballfile, mode='w') as archive:
            for path_on_system, path_in_tarball in tar_paths.items():
                archive.add(path_on_system, arcname=path_in_tarball)
        # Flush and seek to the beginning of the file
        tarballfile.flush()
        tarballfile.seek(0)
        yield tarballfile
    finally:
        # Close on every path — previously the temp file leaked if
        # archive.add() raised before the yield.
        tarballfile.close()
示例11: sort_diskbased
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def sort_diskbased(stream, field, nsize=100000):
    """
    Sort a record stream by *field* using bounded memory: every *nsize*
    records the in-memory buffer is sorted and spilled to a temp file
    (as (key, values) pickles), and on StreamFooter all spill files plus
    the remaining buffer are k-way merged with heapq.merge.

    Python 2 code: uses cPickle and itertools.imap.
    """
    buf = []
    files = []
    count = 0
    # Record type taken from the StreamHeader; used to rebuild namedtuples.
    t = None
    def iter_on_file(f):
        # Replay one spill file as (key, record) pairs until EOF.
        try:
            while True:
                (key, v) = cPickle.load(f)
                yield (key, t._make(v))
        except EOFError:
            f.close()
    for elt in stream:
        if isinstance(elt, StreamHeader):
            t = elt.t
            yield elt
        elif isinstance(elt, StreamFooter):
            # NOTE(review): this sort has no key=, unlike the keyed sort
            # used before spilling below — confirm element natural order
            # matches ordering by *field*, otherwise the merge invariant
            # breaks.
            buf.sort()
            # Merge all spill files with the in-memory remainder, mapping
            # buffered records to (key, record) to match the file format.
            iterables = [iter_on_file(f) for f in files] + [itertools.imap(lambda obj: (getattr(obj, field), obj), buf)]
            for (k, row) in heapq.merge(*iterables):
                yield row
            yield elt
        else:
            buf.append(elt)
            count = count + 1
            if count % nsize == 0:
                # Buffer full: sort by field and spill to a temp file.
                buf.sort(key=lambda obj: getattr(obj, field))
                f = TemporaryFile()
                for item in buf:
                    cPickle.dump((getattr(item, field), list(item)), f, cPickle.HIGHEST_PROTOCOL)
                f.flush()
                files.append(f)
                del buf[:]
示例12: run_reduce
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def run_reduce(self):
    """
    Merge sorted intermediate files from the map phase (k-way merge via
    heapq.merge), spilling intermediate merges to temp files until all
    worker processes have reported stop, then run the reduce loop over
    the final merged iterator.

    Python 2 code: uses cPickle.

    :return: the result of self.reduce_loop over the merged stream.
    """
    self.stopped_received = 0
    self.merged_files = []
    merged_iterator = None
    while True:
        # Iterate and merge files until all jobs are processed
        get_next = self.get_next_file()
        files = get_next
        # itertools.islice(get_next, self.reduce_max_files)
        all_files = [file for file in files]
        iterables = [self.iter_on_file(file) for file in all_files]
        merged_iterator = heapq.merge(*iterables)
        if self.stopped_received < self.numprocs:
            # Not all workers done yet: materialize this merge pass into
            # a temp file so it can participate in later merges.
            if self.debug:
                debug_print("Performing intermediate merge on %u files" % len(iterables))
            f = TemporaryFile()
            self.merged_files.append(f)
            for m in merged_iterator:
                cPickle.dump(m, f, cPickle.HIGHEST_PROTOCOL)
            # NOTE(review): seek(0) before flush() — works because flush
            # does not move the position, but flush-then-seek is clearer.
            f.seek(0)
            f.flush()
        else:
            break
    if len(self.merged_files) > 0:
        if self.debug:
            debug_print("Final merge")
        # Final merge if required
        merged_iterator = heapq.merge(
            *([self.iter_on_file(stream) for stream in self.merged_files] + [merged_iterator])
        )
    if self.debug:
        debug_print("Reduce loop")
    result = self.reduce_loop(merged_iterator)
    return result
示例13: parse_images
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def parse_images(instance):
    """
    Pelican content hook: rewrite <img> tags in the article HTML —
    resolve src via MY_IMG_URL2PATH_FUNC, measure image dimensions with
    PIL, and (when NIUX2_LAZY_LOAD is set) convert tags to the
    lazyload.js data-original/data-width/data-height form.
    """
    if instance._content is None or 'img' not in instance._content:
        return
    content = instance._content[:]
    soup = BeautifulSoup(content, "html.parser")
    for img in soup('img'):
        # Build the source image filename
        my_url2path_func = instance.settings['MY_IMG_URL2PATH_FUNC']
        if not my_url2path_func:
            logger.error('Error: MY_IMG_URL2PATH_FUNC not defined in your pelican configuration.\n\
niux2_lazyload_helper cannot determine the image path from its url.\n')
            return
        imgPath, new_src = my_url2path_func(img['src'])
        # Local images must exist and be readable; remote (http) ones are
        # fetched below.
        if not new_src.startswith('http') and not (path.isfile(imgPath) and access(imgPath, R_OK)):
            logger.error('Error: image file not found: {}'.format(imgPath))
            continue
        img['src'] = new_src
        # Open the source image and query dimensions
        if new_src.startswith('http'):
            # Remote image: download into a temp file so PIL can read it.
            img_data = urlopen(new_src).read()
            fid = TemporaryFile('wb+')
            fid.write(img_data)
            fid.flush()
            fid.seek(0)
        else:
            fid = open(imgPath, 'rb')
        # NOTE(review): fid is never closed — the local-file branch leaks
        # a file handle per image; confirm and consider a with-block.
        im = Image.open(fid)
        imgWidth = im.size[0]
        imgHeight = im.size[1]
        imgResized = False
        if not img.get('width'):
            img['width'] = str(imgWidth) + 'px'
        else:
            # An author-specified width means the image is displayed resized.
            imgResized = True
        # for lazyload.js
        if instance.settings.get('NIUX2_LAZY_LOAD', False):
            # NOTE(review): with bs4, img['class'] is a list, so
            # `+= 'lazy'` would extend it with single characters; this
            # likely assumes a plain-string class attribute — verify.
            if img.get('class'):
                img['class'] += 'lazy'
            else:
                img['class'] = 'lazy'
            img['data-original'] = img['src']
            del img['src']
            if imgResized:
                # Scale the recorded height to match the author's width.
                newImgWidth = int(_width_attr_reg.sub('', img['width']).strip())
                newImgHeight = imgHeight * newImgWidth / imgWidth
                img['data-width'] = str(newImgWidth) + 'px'
                img['data-height'] = str(newImgHeight) + 'px'
            else:
                img['data-width'] = str(imgWidth) + 'px'
                img['data-height'] = str(imgHeight) + 'px'
    instance._content = soup.decode()
示例14: savefile
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def savefile(fd,fname,bfirmid,bclientid):
    """
    Encrypt the contents of *fd* with AES-CFB (key fetched from the key
    server for the given firm/client ids) into a temp file, then upload
    the ciphertext to Dropbox in chunks under "/<fname>_encr".

    Python 2 code (print statements); depends on requests, PyCrypto and
    the Dropbox SDK.
    """
    # Encrypt each chunk from fd as it is read into a
    # tmpfile which will be uploaded to Dropbox using
    # the given filename.
    r = requests.get("%s/keyserv/key/%s/%s" % (app.config['KEYSERVER_URI'],bfirmid,bclientid))
    print "%s/keyserv/key/%s/%s" % (app.config['KEYSERVER_URI'],bfirmid,bclientid)
    keyobj = r.json()
    encrkey = keyobj['key']
    print "Got key %s" % encrkey
    # Carve out a 32byte/256 bit key from the keyserver
    # but convert base64 back to binary first
    bkey = binascii.a2b_base64(encrkey)
    key = bkey[0:32]
    try:
        print "Starting encryption"
        # Setup our AES cipher
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(key,AES.MODE_CFB,iv)
        #cipher = XORCipher.new(key)
        print "Cipher created using iv %s" % binascii.hexlify(iv)
    except:
        raise
    try:
        # The IV is stored as the first block of the ciphertext file so
        # decryption can recover it.
        f = TemporaryFile()
        f.write(iv)
        for chunk in chunkfd(fd,blocksize=4194304):
            f.write(cipher.encrypt(chunk))
        f.flush()
        # Measure the ciphertext size, then rewind for the upload.
        f.seek(0,os.SEEK_END)
        fsize = f.tell()
        f.seek(0)
    except Exception as e:
        print e
    print "Getting ready for Dropbox upload"
    # Get a Dropbox uploader
    # NOTE(review): errors here are printed and swallowed, and f is
    # closed before uploader.finish() is called — confirm the SDK no
    # longer needs the file at that point.
    try:
        access_token = config.get('Credentials','access_token')
        dclient = DropboxClient(access_token)
        uploader = dclient.get_chunked_uploader(f,fsize)
        while uploader.offset < fsize:
            try:
                upload = uploader.upload_chunked()
            except Exception as e:
                print e
    except Exception as e:
        print e
    f.close()
    return uploader.finish(secure_filename("/%s_encr" % fname))
示例15: _read
# 需要导入模块: from tempfile import TemporaryFile [as 别名]
# 或者: from tempfile.TemporaryFile import flush [as 别名]
def _read(rej,res):
    """
    Promise-style helper: spool data into a rewound temp file and pass
    it to the resolve callback *res*; on IOError call the reject
    callback *rej* with the error.
    """
    try:
        f = TemporaryFile()
        # NOTE(review): `str` here shadows the builtin and must be a
        # bytes/str value bound in an enclosing scope not visible in
        # this chunk — confirm at the definition site.
        f.write(str)
        f.flush()
        f.seek(0)
        res( f )
    except IOError as e:
        rej(e)