This article collects typical usage examples of Python's tempfile.mkdtemp function. If you are wondering how exactly tempfile.mkdtemp is used, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the tempfile module it belongs to.
A total of 15 code examples of tempfile.mkdtemp are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
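Before the examples, a quick refresher on the call they all build on: tempfile.mkdtemp(suffix=None, prefix=None, dir=None) creates a new directory that is readable, writable, and searchable only by the creating user and returns its absolute path; unlike the higher-level helpers in tempfile, the caller is responsible for deleting it. A minimal sketch of the usual create-use-remove pattern (the prefix and file name below are only illustrative):

import os
import shutil
import tempfile

# mkdtemp creates the directory and returns its absolute path.
work_dir = tempfile.mkdtemp(prefix="demo_")
try:
    # Use the directory as scratch space.
    with open(os.path.join(work_dir, "scratch.txt"), "w") as fh:
        fh.write("hello\n")
finally:
    # mkdtemp leaves cleanup to the caller.
    shutil.rmtree(work_dir)

Most of the examples below follow this pattern, differing mainly in what happens inside the try block and in how (or whether) the directory is removed afterwards.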
Example 1: downloadDemo
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def downloadDemo(which):
    try:
        downloadDir = tempfile.mkdtemp()
        archivePath = "{}/svviz-data.zip".format(downloadDir)

        # logging.info("Downloading...")
        downloadWithProgress("http://svviz.github.io/svviz/assets/examples/{}.zip".format(which), archivePath)

        logging.info("Decompressing...")
        archive = zipfile.ZipFile(archivePath)
        archive.extractall("{}".format(downloadDir))

        if not os.path.exists("svviz-examples"):
            os.makedirs("svviz-examples/")

        shutil.move("{temp}/{which}".format(temp=downloadDir, which=which), "svviz-examples/")
    except Exception as e:
        print("error downloading and decompressing example data: {}".format(e))
        return False

    if not os.path.exists("svviz-examples"):
        print("error finding example data after download and decompression")
        return False

    return True
Example 2: convertSVG
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def convertSVG(insvg, outformat, converter):
    outdir = tempfile.mkdtemp()
    inpath = "{}/original.svg".format(outdir)
    infile = open(inpath, "w")
    infile.write(insvg)
    infile.flush()
    infile.close()

    outpath = "{}/converted.{}".format(outdir, outformat)

    if converter == "webkittopdf":
        exportData = _convertSVG_webkitToPDF(inpath, outpath, outformat)
    elif converter == "librsvg":
        exportData = _convertSVG_rsvg_convert(inpath, outpath, outformat)
    elif converter == "inkscape":
        exportData = _convertSVG_inkscape(inpath, outpath, outformat)

    return exportData
Example 3: plotInsertSizeDistribution
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def plotInsertSizeDistribution(isd, sampleName, dataHub):
    try:
        from rpy2 import robjects as ro

        d = tempfile.mkdtemp()
        filename = os.path.join(d, sampleName)
        if not filename.endswith(".png"):
            filename += ".png"

        ro.r.png(filename, res=250, width=1200, height=1200)
        alleles = ["alt", "ref", "amb"]
        others = [[len(chosenSet) for chosenSet in dataHub.samples[sampleName].chosenSets(allele)] for allele in alleles]
        plotting.ecdf([isd.insertSizes]+others, ["average"]+alleles, xlab="Insert size (bp)", main=sampleName, legendWhere="bottomright", lwd=2)
        ro.r["dev.off"]()

        data = open(filename).read()
        return data
    except ImportError:
        return None
Example 4: main
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def main(args):
    print_in_box('Validating submission ' + args.submission_filename)
    random.seed()
    temp_dir = args.temp_dir
    delete_temp_dir = False
    if not temp_dir:
        temp_dir = tempfile.mkdtemp()
        logging.info('Created temporary directory: %s', temp_dir)
        delete_temp_dir = True
    validator = validate_submission_lib.SubmissionValidator(temp_dir,
                                                            args.use_gpu)
    if validator.validate_submission(args.submission_filename,
                                     args.submission_type):
        print_in_box('Submission is VALID!')
    else:
        print_in_box('Submission is INVALID, see log messages for details')
    if delete_temp_dir:
        logging.info('Deleting temporary directory: %s', temp_dir)
        subprocess.call(['rm', '-rf', temp_dir])
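A note on the cleanup step above: shelling out to rm -rf works on POSIX systems but spawns an extra process and is not portable. A hedged alternative ending for the same function, using shutil.rmtree as the other examples in this collection do (the names temp_dir and delete_temp_dir are taken from the code above, not defined here):

import logging
import shutil

if delete_temp_dir:
    logging.info('Deleting temporary directory: %s', temp_dir)
    # shutil.rmtree removes the tree in-process and also works on Windows.
    shutil.rmtree(temp_dir, ignore_errors=True)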
Example 5: __init__
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def __init__(self, tmp_dir, output_dir, time_stamp=None, logfile=None, verbose=True, debug=False):
    """
    Constructor

    @param tmp_dir: Directory for temporary data
    @type tmp_dir: str | unicode
    @param output_dir: Directory where final data will be placed
    @type output_dir: str | unicode
    @param time_stamp: timestamp as string
    @type time_stamp: str | unicode
    @param logfile: file | FileIO | StringIO | basestring
    @param verbose: Not verbose means that only warnings and errors will be passed to the stream
    @type verbose: bool
    @param debug: Display debug messages
    @type debug: bool
    """
    assert isinstance(tmp_dir, basestring)
    assert isinstance(output_dir, basestring)
    assert time_stamp is None or isinstance(time_stamp, basestring)
    self._tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
    self._directory_output = output_dir
    self._time_stamp = time_stamp
    if time_stamp is None:
        self._time_stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y.%m.%d_%H.%M.%S')
    super(ProjectFileFolderHandle, self).__init__(logfile, verbose, debug)
Example 6: build_package
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def build_package():
    build_dir = tempfile.mkdtemp(prefix='lambda_package_')
    install_packages(build_dir, REQUIRED_PACKAGES)
    for f in REQUIRED_FILES:
        shutil.copyfile(
            src=os.path.join(module_path, f),
            dst=os.path.join(build_dir, f)
        )
    out_file = os.path.join(
        tempfile.mkdtemp(prefix='lambda_package_built'),
        'sqs_s3_logger_lambda_{}.zip'.format(datetime.datetime.now().isoformat())
    )
    LOGGER.info('Creating a function package file at {}'.format(out_file))
    archive(build_dir, out_file)
    return out_file
Example 7: test_revert_config
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def test_revert_config():
    """
    Test the revertConfig function
    """
    from paradrop.core.config import osconfig

    # Need to make a writable location for our config files.
    settings.UCI_CONFIG_DIR = tempfile.mkdtemp()
    settings.UCI_BACKUP_DIR = tempfile.mkdtemp()

    update = UpdateObject({'name': 'test'})
    update.old = None
    update.new = MagicMock()

    osconfig.revertConfig(update, "network")

    # Clean up our config dir
    pdos.remove(settings.UCI_CONFIG_DIR)
    pdos.remove(settings.UCI_BACKUP_DIR)
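One hedged refinement of the test above: the two mkdtemp directories are only removed at the end of the function, so an exception raised by revertConfig would leave them behind. A hypothetical variant (the function name below is made up; all other names come from the original test) that guarantees cleanup with try/finally:

def test_revert_config_cleanup_variant():
    from paradrop.core.config import osconfig

    settings.UCI_CONFIG_DIR = tempfile.mkdtemp()
    settings.UCI_BACKUP_DIR = tempfile.mkdtemp()
    try:
        update = UpdateObject({'name': 'test'})
        update.old = None
        update.new = MagicMock()
        osconfig.revertConfig(update, "network")
    finally:
        # Remove the temporary config directories even if revertConfig raises.
        pdos.remove(settings.UCI_CONFIG_DIR)
        pdos.remove(settings.UCI_BACKUP_DIR)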
Example 8: testGetOrGenerateTxtVocab
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def testGetOrGenerateTxtVocab(self):
    data_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
    test_file = os.path.join(self.get_temp_dir(), "test.txt")
    with tf.gfile.Open(test_file, "w") as outfile:
        outfile.write("a b c\n")
        outfile.write("d e f\n")

    # Create a vocab over the test file.
    vocab1 = generator_utils.get_or_generate_txt_vocab(
        data_dir, "test.voc", 20, test_file)
    self.assertTrue(tf.gfile.Exists(os.path.join(data_dir, "test.voc")))
    self.assertIsNotNone(vocab1)

    # Append a new line to the test file which would change the vocab if
    # the vocab were not being read from file.
    with tf.gfile.Open(test_file, "a") as outfile:
        outfile.write("g h i\n")

    vocab2 = generator_utils.get_or_generate_txt_vocab(
        data_dir, "test.voc", 20, test_file)
    self.assertTrue(tf.gfile.Exists(os.path.join(data_dir, "test.voc")))
    self.assertIsNotNone(vocab2)

    self.assertEqual(vocab1.dump(), vocab2.dump())
Example 9: dotplot2
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def dotplot2(s1, s2, wordsize=5, overlap=5, verbose=1):
    """ verbose = 0 (no progress), 1 (progress if s1 and s2 are long) or
    2 (progress in any case) """
    doProgress = False
    if verbose > 1 or len(s1)*len(s2) > 1e6:
        doProgress = True

    mat = numpy.ones(((len(s1)-wordsize)/overlap+2, (len(s2)-wordsize)/overlap+2))

    for i in range(0, len(s1)-wordsize, overlap):
        if i % 1000 == 0 and doProgress:
            logging.info("  dotplot progress: {} of {} rows done".format(i, len(s1)-wordsize))
        word1 = s1[i:i+wordsize]

        for j in range(0, len(s2)-wordsize, overlap):
            word2 = s2[j:j+wordsize]

            if word1 == word2 or word1 == word2[::-1]:
                mat[i/overlap, j/overlap] = 0

    imgData = None
    tempDir = tempfile.mkdtemp()
    try:
        path = os.path.join(tempDir, "dotplot.png")
        misc.imsave(path, mat)
        imgData = open(path).read()
    except Exception as e:
        logging.error("Error generating dotplots:'{}'".format(e))
    finally:
        shutil.rmtree(tempDir)
    return imgData
Example 10: __init__
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def __init__(self, datatype, topic, skip_conversion, **conf):
    self._logger = logging.getLogger('SPOT.INGEST.COLLECTOR')
    self._logger.info('Initializing Distributed Collector process...')

    self._datatype = datatype
    self._interval = conf['ingestion_interval']
    self._isalive = True
    self._process_opts = conf['pipelines'][datatype]['process_opt']
    self._processes = conf['collector_processes']
    self._producer_kwargs = conf['producer']
    self._skip_conversion = skip_conversion
    self._topic = topic

    # .............................init FileObserver
    self.FileWatcher = FileWatcher(**conf['file_watcher'])

    # .............................set up local staging area
    self._tmpdir = mkdtemp(prefix='_DC.', dir=conf['pipelines'][datatype]['local_staging'])
    self._logger.info('Use directory "{0}" as local staging area.'.format(self._tmpdir))

    # .............................define a process pool object
    self._pool = Pool(self._processes, _init_child, [self._tmpdir])
    self._logger.info('Master Collector will use {0} parallel processes.'
                      .format(self._processes))

    signal.signal(signal.SIGUSR1, self.kill)
    self._logger.info('Initialization completed successfully!')
Example 11: mkdtemp
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def mkdtemp(suffix=None, prefix=None, dir=None):
    """
    Wrap `tempfile.mkdtemp()` to make the suffix and prefix optional (like Python 3.5).
    """
    kwargs = {k: v for (k, v) in
              dict(suffix=suffix, prefix=prefix, dir=dir).items()
              if v is not None}
    return old_mkdtemp(**kwargs)
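This wrapper assumes old_mkdtemp was bound to the original tempfile.mkdtemp before the name was shadowed (for instance, old_mkdtemp = tempfile.mkdtemp at module import time; that binding is not shown in the snippet). With that in place, callers may pass any subset of the three keyword arguments, and None values are filtered out before reaching the library; a short usage sketch:

d1 = mkdtemp()                         # same as tempfile.mkdtemp()
d2 = mkdtemp(prefix="build_")          # same as tempfile.mkdtemp(prefix="build_")
d3 = mkdtemp(suffix=".cache", dir=d2)  # suffix and dir forwarded, prefix omitted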
Example 12: __init__
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def __init__(self, suffix=None, prefix=None, dir=None):
    self.name = mkdtemp(suffix, prefix, dir)
    self._finalizer = finalize(
        self, self._cleanup, self.name,
        warn_message="Implicitly cleaning up {!r}".format(self))
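This constructor appears to mirror the standard library's tempfile.TemporaryDirectory (available since Python 3.2), which wraps mkdtemp and, in recent CPython versions, uses weakref.finalize for implicit cleanup much like the snippet above. For comparison, a minimal sketch of the standard class, which removes the directory when the with block exits:

import os
import tempfile

with tempfile.TemporaryDirectory(prefix="job_") as tmp:
    # tmp is the path returned by mkdtemp under the hood.
    with open(os.path.join(tmp, "data.bin"), "wb") as fh:
        fh.write(b"\x00" * 16)
# The directory and everything inside it has been removed at this point.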
Example 13: bindiff_pickle_export
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def bindiff_pickle_export(self, sample, is_64_bit = True, timeout = None):
    """
    Load a sample into IDA Pro, perform autoanalysis and export a pickle file.
    :param sample: The sample's path
    :param is_64_bit: If the sample needs to be analyzed by the 64 bit version of IDA
    :param timeout: Timeout for the analysis in seconds
    :return: The file name of the exported pickle database. The file needs
             to be deleted by the caller. Returns None on error.
    """
    data_to_send = {
        "timeout": timeout,
        "is_64_bit": is_64_bit}
    url = "%s/binexport_pickle" % next(self._urls)
    log.debug("curl -XPOST --data '%s' '%s'", json.dumps(data_to_send), url)
    response = requests.post(url, data = data_to_send, files = {os.path.basename(sample): open(sample, "rb")})
    if response.status_code == 200:
        handle_tar, path_tar = tempfile.mkstemp(suffix = ".tar.gz")
        with os.fdopen(handle_tar, "wb") as f:
            map(f.write, response.iter_content(1024))
        directory = tempfile.mkdtemp()
        subprocess.check_call(["tar", "xf", path_tar], cwd = directory)

        handle_bindiff, output_bindiff = tempfile.mkstemp(suffix = ".BinExport")
        with os.fdopen(handle_bindiff, "wb") as f:
            with open(os.path.join(directory, "output.BinExport"), "rb") as f2:
                shutil.copyfileobj(f2, f)

        handle_pickle, output_pickle = tempfile.mkstemp(suffix = ".pickle")
        with os.fdopen(handle_pickle, "wb") as f:
            with open(os.path.join(directory, "output.pickle"), "rb") as f2:
                shutil.copyfileobj(f2, f)

        os.unlink(path_tar)
        shutil.rmtree(directory)
        return output_bindiff, output_pickle
    else:
        log.error("Bindiff server responded with status code %d: %s", response.status_code, response.content)
        return None
Example 14: bindiff_export
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def bindiff_export():
    """
    Run the IDA Pro autoanalysis on the input file and export a BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output database
             name in key 'output', or status code 422 on invalid parameters, 408 on
             timeout or 500 on other errors.
    """
    logger.info("bindiff_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)

        output = os.path.join(directory, "output.BinExport")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout, os.path.join(PREFIX, "export_binexport_pickle.py"), "binexport", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment = True, attachment_filename = "%s.BinExport" % filename, mimetype = "application/binary")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example 15: bindiff_pickle_export
# Required import: import tempfile [as alias]
# Or: from tempfile import mkdtemp [as alias]
def bindiff_pickle_export():
    """
    Run the IDA Pro autoanalysis on the input file and export a BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output database
             name in key 'output', or status code 422 on invalid parameters, 408 on
             timeout or 500 on other errors.
    """
    logger.info("bindiff_pickle_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)

        output_binexport = os.path.join(directory, "output.BinExport")
        output_pickle = os.path.join(directory, "output.pickle")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout, os.path.join(PREFIX, "export_binexport_pickle.py"), "binexport_pickle", output_binexport, output_pickle)
            logger.info("Command completed successfully")
            output_tar = os.path.join(directory, "output.tar.gz")
            subprocess.check_call(["tar", "czf", output_tar, os.path.relpath(output_binexport, directory), os.path.relpath(output_pickle, directory)], cwd = directory)
            return send_file(open(output_tar, "rb"), as_attachment = True, attachment_filename = "%s.tar.gz" % filename, mimetype = "application/gzip")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)