This article collects typical usage examples of the tempfile.NamedTemporaryFile.unlink method in Python. If you are wondering how NamedTemporaryFile.unlink is used in practice, the hand-picked examples below should help. You can also look further into usage examples for the containing class, tempfile.NamedTemporaryFile.
The following shows 14 code examples of the NamedTemporaryFile.unlink method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
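Before the examples, note the pattern they all share: the temporary file is created with delete=False, closed so that other code (often an external program) can reopen it by name, and finally removed by name. In the CPython releases most of these snippets were written for (largely Python 2 on POSIX), the wrapper returned by NamedTemporaryFile exposes unlink as a plain alias for os.unlink, which is why the calls read obj.unlink(obj.name); newer Python versions may not expose that attribute, so os.unlink(name) or os.remove(name) is the portable spelling. A minimal sketch of the pattern (not taken from any of the examples below):

import os
from tempfile import NamedTemporaryFile

# Minimal sketch of the pattern shared by the examples below.
tmp = NamedTemporaryFile(delete=False)   # keep the file on disk after close()
try:
    tmp.write(b'some bytes')
    tmp.close()                          # release the handle; the name stays valid
    # ... hand tmp.name to another process or API here ...
finally:
    os.unlink(tmp.name)                  # portable equivalent of tmp.unlink(tmp.name)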
Example 1: export
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def export(self, out_f=None, format='mp3'):
    out_f = _fd_or_path_or_tempfile(out_f, 'wb+')
    out_f.seek(0)

    data = NamedTemporaryFile(mode="wb", delete=False)

    wave_data = wave.open(data)
    wave_data.setnchannels(self.channels)
    wave_data.setsampwidth(self.sample_width)
    wave_data.setframerate(self.frame_rate)
    wave_data.setnframes(self.frame_count())
    wave_data.writeframesraw(self._data)
    wave_data.close()

    output = NamedTemporaryFile(mode="w+")

    # read stdin / write stdout
    subprocess.call(['ffmpeg',
                     '-y',  # always overwrite existing files
                     "-f", "wav", "-i", data.name,  # input options (filename last)
                     "-f", format, output.name,  # output options (filename last)
                     ],
                    # make ffmpeg shut up
                    stderr=open(os.devnull))

    output.seek(0)
    out_f.write(output.read())

    data.unlink(data.name)

    out_f.seek(0)
    return out_f
Example 2: atomic_write
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def atomic_write(content, target):
    t = NamedTemporaryFile(dir="/tmp", delete=False)
    t.file.write(content)
    t.file.flush()
    t.close()
    copy(t.name, target)
    t.unlink(t.name)
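Note that copy() followed by unlink() is not atomic in the strict sense: a reader can observe a partially written target while the copy is in progress. When the temporary file can be created on the same filesystem as the target, an atomic rename is the usual alternative. A minimal sketch of that variant (an assumption, not the code above):

import os
from tempfile import NamedTemporaryFile

def atomic_write_rename(content, target):
    # Create the temp file next to the target so the rename stays on one filesystem.
    t = NamedTemporaryFile(mode='wb', dir=os.path.dirname(target) or '.', delete=False)
    try:
        t.write(content)
        t.flush()
        os.fsync(t.fileno())
        t.close()
        os.replace(t.name, target)  # atomic on POSIX and Windows (Python 3.3+)
    except Exception:
        t.close()
        os.unlink(t.name)           # clean up the temp file on failure
        raise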
Example 3: _save_samba_share
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def _save_samba_share(conf):
    temp = NamedTemporaryFile('w', delete=False)
    conf.write(temp)
    temp.close()

    bkp_date = datetime.datetime.now().strftime('%Y-%m-%d_%Hh%Mm%Ss')
    process = run_as_root('cp "/etc/samba/smb.conf" '
                          '"/etc/samba/smb.conf-{0}.bkp"'
                          .format(bkp_date))
    process.expect(pexpect.EOF)

    process = run_as_root('cp "{0}" "/etc/samba/smb.conf"'
                          .format(temp.name))
    process.expect(pexpect.EOF)

    process = run_as_root('chmod 644 /etc/samba/smb.conf')
    process.expect(pexpect.EOF)

    process = run_as_root('chown root:root /etc/samba/smb.conf')
    process.expect(pexpect.EOF)

    temp.unlink(temp.name)
Example 4: add_key
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def add_key(alg, key, nm):
    try:
        name, machine = nm.split('@')
    except ValueError:
        return False
    name = name
    d = './' + keydir + '/' + machine
    kf = d + '/' + prepend + name + '.pub'
    if not mre.match(machine) or not nre.match(name) or not are.match(alg):
        return False
    if os.path.exists(kf):
        print 'ignoring duplicate key for:', kf
        return True  # we do this so that we don't leak info
    f = NamedTemporaryFile(delete=False)
    f.file.write('%s %s %s@%s\n' % (alg, key, name, machine))
    f.close()
    p = Popen(['ssh-vulnkey', f.name], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.stdin.close()
    if p.stderr.read().__len__() > 1:
        f.unlink(f.name)
        return False
    if not os.path.exists(d):
        os.makedirs(d)
    shutil.move(f.name, kf)
    print "Imported", kf
    return True
Example 5: test_dump
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def test_dump(self):
    cbd1 = CitiBikeData(source_url=self.test_data_url)
    self.assert_data_loaded(cbd1)

    js = StringIO()
    cbd1.dump(js)
    self.assert_data_loaded(cbd1)
    self.assertGreater(len(js.getvalue()), 0)

    js.reset()
    cbd2 = CitiBikeData(load_on_init=False)
    self.assert_data_not_loaded(cbd2)
    cbd2.load(js)
    self.assert_data_loaded(cbd2)
    self.assertDictEqual(cbd1.json, cbd2.json)

    ntf = NamedTemporaryFile(delete=False)
    cbd1.dump(ntf)
    self.assert_data_loaded(cbd1)
    self.assertGreater(len(js.getvalue()), 0)
    ntf.close()

    cbd3 = CitiBikeData(source_url="file:" + ntf.name)
    self.assert_data_loaded(cbd3)
    self.assertDictEqual(cbd1.json, cbd3.json)
    ntf.unlink(ntf.name)  # delete file
Example 6: test_pipehandler
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def test_pipehandler(self):
    tmp = NamedTemporaryFile(delete=False)
    tmp.close()
    Handler = HandlerFactory("pipe", {"path": tmp.name})
    h = Handler("test")
    h([])
    with open(tmp.name, "r") as f:
        self.assertEqual(f.read(), "test")
    tmp.unlink(tmp.name)
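Here the temporary file is created and closed immediately, purely to reserve a unique path for the handler to write to. A sketch of the same idea with tempfile.mkstemp, for comparison (an alternative, not what the test above uses):

import os
import tempfile

# Reserve a unique path for a test fixture without keeping a handle open.
fd, path = tempfile.mkstemp()
os.close(fd)       # only the name is needed, not the open descriptor
# ... exercise code that writes to `path` ...
os.unlink(path)    # same cleanup the examples do via tmp.unlink(tmp.name)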
Example 7: test_clbhandler
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def test_clbhandler(self):
    tmp = NamedTemporaryFile(delete=False)
    tmp.file.write("def raise_(): raise FutureWarning\n")
    tmp.file.write("def raise__(smt): raise smt")
    tmp.close()
    Handler = HandlerFactory("callback", {"path": tmp.name})
    self.assertRaises(FutureWarning, Handler("raise_()"), [])
    self.assertRaises(FutureWarning, Handler("raise__({0})"), [FutureWarning])
    tmp.unlink(tmp.name)
Example 8: upload
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def upload(self, src_name, dst_name, compress=True):
    if not compress:
        return self._upload(src_name, dst_name)

    fout = NamedTemporaryFile(suffix='.gz', mode='wb', delete=False)
    try:
        fout.close()
        logging.debug('Compressing file %s...', src_name)
        with \
                open(src_name, 'rb') as fin, \
                closing(gzip.GzipFile(fout.name, mode='wb')) as gzout:
            for chunk in iterchunks(fin):
                gzout.write(chunk)
        return self._upload(fout.name, dst_name + '.gz')
    finally:
        fout.unlink(fout.name)
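The try/finally above guarantees the temporary .gz file is removed even if compression or the upload fails. The same cleanup idea, pulled out of the class for illustration (the name compress_to_temp is invented here, not part of the original code), could look like this:

import gzip
import os
import shutil
from tempfile import NamedTemporaryFile

def compress_to_temp(src_name):
    """Compress src_name into a temporary .gz file and return its path."""
    fout = NamedTemporaryFile(suffix='.gz', delete=False)
    fout.close()
    try:
        with open(src_name, 'rb') as fin, gzip.open(fout.name, 'wb') as gzout:
            shutil.copyfileobj(fin, gzout)
        return fout.name
    except Exception:
        os.remove(fout.name)  # remove the temp file on failure; caller removes it on success
        raise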
Example 9: export
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def export(self, out_f=None, format='mp3', codec=None):
    out_f = _fd_or_path_or_tempfile(out_f, 'wb+')
    out_f.seek(0)

    # for wav output we can just write the data directly to out_f
    if format == "wav":
        data = out_f
    else:
        data = NamedTemporaryFile(mode="wb", delete=False)

    wave_data = wave.open(data, 'wb')
    wave_data.setnchannels(self.channels)
    wave_data.setsampwidth(self.sample_width)
    wave_data.setframerate(self.frame_rate)
    wave_data.setnframes(self.frame_count())
    wave_data.writeframesraw(self._data)
    wave_data.close()

    # for wav files, we're done (wav data is written directly to out_f)
    if format == 'wav':
        return out_f

    output = NamedTemporaryFile(mode="w+")

    # build call args
    args = [self.ffmpeg,
            '-y',  # always overwrite existing files
            "-f", "wav", "-i", data.name,  # input options (filename last)
            ]
    if codec is not None:
        # force audio encoder
        args.extend(["-acodec", codec])

    args.extend([
        "-f", format, output.name,  # output options (filename last)
    ])

    # read stdin / write stdout
    subprocess.call(args,
                    # make ffmpeg shut up
                    stderr=open(os.devnull))

    output.seek(0)
    out_f.write(output.read())

    data.unlink(data.name)

    out_f.seek(0)
    return out_f
Example 10: upload
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def upload(self, src_name, dst_name, compress=True, use_gzip=False):
    if compress:
        fout = NamedTemporaryFile(suffix='.gz', mode='wb', delete=False)
        try:
            if use_gzip:
                logging.debug('Compressing file %s with gzip...', src_name)
                p = subprocess.Popen(["gzip", '-c', src_name], stdout=fout)
                assert p.wait() == 0, 'Gzip compression failed'
                fout.close()
                return self._upload(fout.name, dst_name + '.gz')
            else:
                fout.close()
                logging.debug('Compressing file %s...', src_name)
                with \
                        open(src_name, 'rb') as fin, \
                        closing(gzip.GzipFile(fout.name, mode='wb')) as gzout:
                    for chunk in iterchunks(fin):
                        gzout.write(chunk)
                return self._upload(fout.name, dst_name + '.gz')
        finally:
            fout.unlink(fout.name)
    else:
        self._upload(src_name, dst_name)
Example 11: FacadeTest
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
class FacadeTest(TestCase):

    def setUp(self):
        cfg = ConfigParser()
        cfg.add_section('some_section')
        cfg.set('some_section', 'foo', 'bar')
        cfg.set('some_section', 'host', 'foo')
        cfg.set('some_section', 'port', '29192')
        self.config_file = NamedTemporaryFile(mode='w', delete=False)
        cfg.write(self.config_file)
        self.config_file.close()

        self.env_file = NamedTemporaryFile(mode='w', delete=False)
        cfg = ConfigParser()
        cfg.add_section('some_section')
        cfg.set('some_section', 'host', 'SOME_HOST_NAME')
        cfg.set('some_section', 'port', 'SOME_PORT_NAME')
        cfg.add_section('other_section')
        cfg.set('other_section', 'foo', 'bar')
        cfg.write(self.env_file)
        self.env_file.close()

    def tearDown(self):
        self.config_file.unlink(self.config_file.name)
        self.env_file.unlink(self.env_file.name)

    def test_not_existing_config(self):
        configuration = get_configuration('non_existing_section')
        self.assertIsNone(configuration)

    def test_not_existing_variables(self):
        configuration = get_configuration('messaging',
                                          config_file='non-existing')
        self.assertIsNone(configuration)

    def test_existing_config(self):
        configuration = get_configuration(section_name='some_section',
                                          config_file=self.config_file.name,
                                          variables=self.env_file.name)
        self.assertTrue('host' in configuration)
        self.assertTrue('port' in configuration)
        self.assertEquals(configuration['host'], 'foo')
        self.assertEquals(configuration['port'], '29192')
        self.assertFalse('foo' in configuration)

    def test_existing_env(self):
        os.environ['SOME_HOST_NAME'] = 'bar'
        os.environ['SOME_PORT_NAME'] = '6661'
        configuration = get_configuration(section_name='some_section',
                                          config_file=self.config_file.name,
                                          variables=self.env_file.name)
        self.assertTrue('host' in configuration)
        self.assertTrue('port' in configuration)
        self.assertEquals(configuration['host'], 'bar')
        self.assertEquals(configuration['port'], '6661')
        self.assertFalse('foo' in configuration)
        os.environ.pop('SOME_HOST_NAME')
        os.environ.pop('SOME_PORT_NAME')

    def test_not_in_config(self):
        configuration = get_configuration(section_name='other_section',
                                          config_file=self.config_file.name,
                                          variables=self.env_file.name)
        self.assertIsNone(configuration)
Example 12: handle
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def handle(self, *args, **options):
    if not (GOOGLE_DOCS_ACCOUNT and GOOGLE_DOCS_PASSWORD and GOOGLE_DOCS_RESOURCE_ID):
        raise CommandError('You must set both GOOGLE_DOCS_ACCOUNT, GOOGLE_DOCS_PASSWORD and GOOGLE_DOCS_RESOURCE_ID in your settings file.')
    verbosity = int(options.get('verbosity', 1))
    output_all = options.get('output_all')
    dry_run = options.get('dry_run')

    fields = ('email', 'first_name', 'last_name', 'phone', 'city', 'state', 'zipcode', 'is_a', 'broadcasters', 'date_created', 'share_info')

    profile_list = NonUserProfile.objects.order_by('-date_created')
    if not output_all:
        profile_list = profile_list.filter(share_info=True)

    if len(profile_list):
        if verbosity > 1:
            self.stdout.write('{0} signups to record.'.format(len(profile_list)))

        fp = NamedTemporaryFile(delete=False)
        writer = csv.DictWriter(fp, fields)
        writer.writeheader()

        for signup in profile_list:
            output = {
                'email': signup.email,
                'first_name': signup.first_name,
                'last_name': signup.last_name,
                'phone': signup.phone,
                'city': signup.city,
                'state': signup.state,
                'zipcode': signup.zipcode,
                'is_a': signup.is_a,
                'date_created': signup.date_created.strftime('%m/%d/%Y %H:%M:%S'),
                'share_info': signup.share_info
            }
            extra_fields_data = signup.extra_fields
            for extra_field in SIGNUP_EXTRA_FIELDS:
                input_val = None
                if isinstance(extra_fields_data[extra_field], list):
                    input_val = ', '.join(extra_fields_data[extra_field])
                else:
                    input_val = extra_fields_data[extra_field]
                output[extra_field] = input_val
            writer.writerow(output)
            if dry_run:
                self.stdout.write('Row created:\n{0}\n'.format('|'.join([str(output[f]) for f in fields])))

        del(writer)

        if not dry_run:
            client = gdata.docs.client.DocsClient()
            login_token = client.ClientLogin(GOOGLE_DOCS_ACCOUNT, GOOGLE_DOCS_PASSWORD, 'politicaladsleuth')
            fp.close()
            media = gdata.data.MediaSource(file_path=fp.name, content_type='text/csv')
            try:
                resource = client.get_resource_by_id(GOOGLE_DOCS_RESOURCE_ID)
                updated_resource = client.update_resource(resource, media=media, update_metadata=False, new_revision=True)
                self.stdout.write('Data uploaded to "{0}"\n'.format(updated_resource.title.text))
            except gdata.client.RequestError as e:
                self.stderr.write(e.message + '\n')
                self.stdout.write('****Upload may have succeeded despite an InvalidEntryException error****\n')

        fp.close()
        fp.unlink(fp.name)
    else:
        self.stdout.write('No signups for the given parameters\n')
Example 13: store
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
def store(self, file=None, content=None, ctype=None, **kwd):
    """save a file-like item"""
    if content is None and not hasattr(file, 'read'):
        raise TypeError('invalid file-like object')

    data = content if content is not None else file.read()
    size = len(data)

    ext = guessImageType(data[:32])
    if ext is None:
        raise ValueError('invalid image file')

    hashes = [md5(data).hexdigest()]
    _exists_id = self.exists(hashed=hashes[0])
    if _exists_id:
        id = _exists_id
        filename = _make_filename(id, ext)
        print ('id {} or hash {} exists!!'.format(id, hashes[0]))
        # raise DuplicateError('already exists')
        return [True, id, filename]

    ids = [_make_id(hashes[0])]
    if 'id' in kwd and kwd['id'] and kwd['id'] not in ids:
        ids += [kwd['id']]

    from image import SimpImage, MIN_QUALITY

    max_file_size = int(self.get_config('max_file_size'))
    max_jpeg_quality = int(self.get_config('max_jpeg_quality'))
    max_width = int(self.get_config('max_width'))
    max_height = int(self.get_config('max_height'))

    if size > max_file_size: max_jpeg_quality -= 1
    if max_jpeg_quality < MIN_QUALITY: max_jpeg_quality = MIN_QUALITY

    im = SimpImage(blob=data)
    meta = im.meta
    if meta['width'] > max_width or meta['height'] > max_height:
        if self.get_config('auto_scale') and im.thumbnail(max_width, max_height):
            if im.format == 'JPEG' and im.quality > max_jpeg_quality:
                im.quality = max_jpeg_quality
            data = im.get_blob()
            size = len(data)
            print im.meta
            print 'new scaled size {}'.format(size)
            hashes += [md5(data).hexdigest()]
        else:
            raise ValueError('file: {} dimension {}x{} is too big, max is {}x{}'.format(
                kwd['name'] if 'name' in kwd else '', meta['width'], meta['height'], max_width, max_height))

    if im.format == 'JPEG':
        if im.quality > max_jpeg_quality:
            print 'quality {} is too high, hash {}'.format(im.quality, hashes[0])
            from tempfile import NamedTemporaryFile
            _tmp = NamedTemporaryFile('w+b', dir=self.get_config('temp_root'), delete=False)
            _tmp.file.close()
            save_file(_tmp.name, blob=data)
            if jpegoptim(_tmp.name):
                fp = open(_tmp.name)
                data = fp.read()
                size = len(data)
                # print 'new optimized size {}'.format(size)
                fp.close()
                _tmp.unlink(_tmp.name)
                del im
                im = SimpImage(blob=data)
                meta = im.meta
                hashes += [md5(data).hexdigest()]
            else:
                raise EnvironmentError('jpeg quality is too high, or need jpegoptim')
    elif im.format == 'PNG' and self.get_config('force_jpeg'):
        im.format = 'JPEG'
        im.quality = max_jpeg_quality
        data = im.get_blob()
        size = len(data)
        hashes += [md5(data).hexdigest()]
        ext = 'jpg'
        meta = im.meta

    del im

    if (size > max_file_size):
        raise ValueError('file: {} size {} is too big, max is {}'.format(
            kwd['name'] if 'name' in kwd else '', size, max_file_size))

    hashed = hashes[len(hashes) - 1]  # md5(data).hexdigest()
    # print ('md5 hash: {}'.format(hashed))
    # TODO: add for support (md5 + size) id
    id = _make_id(hashed)
    # print ('new filename: %r' % filename)
    # TODO: fix for support s3 front browse

    _exists_id = self.exists(id) or self.exists(hashed=hashed)
    if _exists_id:
        id = _exists_id
        filename = _make_filename(id, ext)
        print ('id {} or hash {} exists!!'.format(id, hashed))
        # raise DuplicateError('already exists')
        return [True, id, filename]

    filename = _make_filename(id, ext)
    # print ('id: {}'.format(id))
    # ......... the rest of this method is omitted .........
Example 14: ConfigManagerTest
# Required import: from tempfile import NamedTemporaryFile [as alias]
# Or: from tempfile.NamedTemporaryFile import unlink [as alias]
class ConfigManagerTest(unittest.TestCase):

    def setUp(self):
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('some_section')
        cfg.set('some_section', 'host', 'foo')
        cfg.set('some_section', 'port', '29192')
        cfg.set('some_section', 'backend', 'tests.test_config_manager.Backend')
        self.config_file = NamedTemporaryFile(mode='w', delete=False)
        cfg.write(self.config_file)
        self.config_file.close()

        self.env_file = NamedTemporaryFile(mode='w', delete=False)
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('some_section')
        cfg.set('some_section', 'host', 'SOME_HOST_NAME')
        cfg.set('some_section', 'port', 'SOME_PORT_NAME')
        cfg.add_section('other_section')
        cfg.set('other_section', 'foo', 'bar')
        cfg.write(self.env_file)
        self.env_file.close()

    def tearDown(self):
        self.config_file.unlink(self.config_file.name)
        self.env_file.unlink(self.env_file.name)

    def test_not_existing_section(self):
        configuration = get_configuration('not-existing-section',
                                          config_file=self.config_file.name)
        self.assertIsNone(configuration)

    def test_existing_config(self):
        configuration = get_configuration(section_name='some_section',
                                          config_file=self.config_file.name,
                                          variables_file=self.env_file.name)
        self.assertTrue('host' in configuration)
        self.assertTrue('port' in configuration)
        self.assertEquals(configuration['host'], 'foo')
        self.assertEquals(configuration['port'], '29192')
        self.assertFalse('foo' in configuration)

    def test_existing_env(self):
        os.environ['SOME_HOST_NAME'] = 'bar'
        os.environ['SOME_PORT_NAME'] = '6661'
        configuration = get_configuration(section_name='some_section',
                                          config_file=self.config_file.name,
                                          variables_file=self.env_file.name)
        self.assertTrue('host' in configuration)
        self.assertTrue('port' in configuration)
        self.assertEquals(configuration['host'], 'bar')
        self.assertEquals(configuration['port'], '6661')
        self.assertFalse('foo' in configuration)
        os.environ.pop('SOME_HOST_NAME')
        os.environ.pop('SOME_PORT_NAME')

    def test_get_backend_class(self):
        configuration = get_configuration(section_name='some_section',
                                          config_file=self.config_file.name,
                                          variables_file=self.env_file.name)
        backend_class = get_backend_class(configuration)
        self.assertTrue('Backend' in str(backend_class))