本文整理汇总了Python中shutil.make_archive函数的典型用法代码示例。如果您正苦于以下问题:Python make_archive函数的具体用法?Python make_archive怎么用?Python make_archive使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了make_archive函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: zip_dict
def zip_dict(dst_fname):
    """Archive the dictionary directory containing *dst_fname* and print a
    Markdown-ready download line with the dictionary's word count.

    :param dst_fname: path to a dictionary file inside the directory to pack;
        its sibling ``.ifo`` (StarDict metadata) file supplies the word counts.
    """
    dir_fpath, basename = os.path.split(dst_fname)
    root_dir, dir_fname = os.path.split(dir_fpath)
    # Using the "zip" format does not work here -- it breaks Colordict
    # (the app starts blinking endlessly), so gzipped tar is used instead.
    #fmt = "zip"
    fmt = "gztar"
    # If the dictionary data sits directly in the archive root (not inside a
    # folder), CSS files and images are not found by the reader.
    save_without_folder = False # True #
    if save_without_folder:
        fname = shutil.make_archive(o_p.join(revo_dicts_fpath, dir_fname), fmt, dir_fpath)
    else:
        fname = shutil.make_archive(o_p.join(revo_dicts_fpath, dir_fname), fmt, root_dir, base_dir=dir_fname)
    # Parse "key=value" lines from the .ifo metadata file.
    ifo_fname = os.path.splitext(dst_fname)[0] + ".ifo"
    with open(ifo_fname) as ifo_f:
        properties = {}
        for line in ifo_f:
            lst = line.split("=")
            if len(lst) >= 2:
                key, value = lst[0].strip(), lst[1].strip()
                if key and value:
                    properties[key] = value
    # NOTE(review): assumes "wordcount" is always present in the .ifo file;
    # int(None) would raise TypeError otherwise -- confirm with the data.
    words_cnt = int(properties.get("wordcount"))
    synwordcount = properties.get("synwordcount")
    if synwordcount:
        words_cnt += int(synwordcount)
    fname = os.path.basename(fname)
    # The two trailing spaces force a Markdown line break.
    print("http://new.bombono.org/download/revo/%(fname)s\t%(words_cnt)s " % locals())
示例2: mongo
def mongo(host='localhost', output='/opt/canopsis/var/backups'):
    """Dump a MongoDB instance with mongodump and zip the dump into *output*.

    :param host: hostname of the MongoDB server to dump.
    :param output: directory receiving ``backup_mongodb.zip``
        (created if it does not exist).
    """
    logger = logging.getLogger()
    logger.debug('Mongo Backup start')
    logger.debug('Host : %s' % host)
    logger.debug('Output: %s' % output)
    logger.debug('Create temp dir')
    archive_name = 'backup_mongodb'
    tmp_dir = mkdtemp(prefix='/opt/canopsis/tmp/')
    os.makedirs('%s/%s' % (tmp_dir, archive_name))
    logger.debug('Create output dir if not exists')
    if not os.path.exists(output):
        os.makedirs(output)
    logger.debug('Launch mongodump')
    # Pass the command as an argument list with shell=False so that a
    # hostile *host* value cannot inject shell syntax.
    mongodump_cmd = ['/opt/canopsis/bin/mongodump',
                     '--host', host,
                     '--out', '%s/%s' % (tmp_dir, archive_name)]
    logger.debug('Command: %s' % ' '.join(mongodump_cmd))
    dump_proc = Popen(mongodump_cmd)
    dump_proc.wait()
    if dump_proc.returncode != 0:
        # Best effort: keep going (a partial dump may still be archived),
        # but record the failure instead of silently ignoring it.
        logger.error('mongodump exited with code %s' % dump_proc.returncode)
    try:
        logger.debug('Create archive into %s' % output)
        shutil.make_archive('%s/%s' % (output, archive_name),
                            'zip',
                            tmp_dir)
    finally:
        # Always remove the temporary dump directory, even if archiving fails.
        logger.debug('Remove temp dir')
        shutil.rmtree(tmp_dir)
    logger.debug('Mongo Backup finished')
示例3: fetch_sync_gateway_logs
def fetch_sync_gateway_logs(prefix, is_perf_run=False):
    """Pull sync_gateway logs from remote hosts via ansible and zip them under /tmp.

    :param prefix: string prepended to the archive name (e.g. a test-run id).
    :param is_perf_run: when True, omit the timestamp from the archive name and
        also copy the zip into ``performance_results/<prefix>/``.
    :return: path to the created ``.zip`` file.
        NOTE(review): if ``/tmp/sg_logs`` does not exist the function falls
        through and implicitly returns None -- confirm callers handle that.
    """
    print("\n")
    print("Pulling logs")
    # fetch logs from sync_gateway instances
    status = run_ansible_playbook("fetch-sync-gateway-logs.yml", stop_on_fail=False)
    if status != 0:
        # Best effort: log and continue -- some hosts may still have produced logs.
        log.error("Error pulling logs")
    # zip logs and timestamp
    if os.path.isdir("/tmp/sg_logs"):
        date_time = time.strftime("%Y-%m-%d-%H-%M-%S")
        if is_perf_run:
            name = "/tmp/{}-sglogs".format(prefix)
        else:
            name = "/tmp/{}-{}-sglogs".format(prefix, date_time)
        shutil.make_archive(name, "zip", "/tmp/sg_logs")
        # Remove the raw logs once they are archived.
        shutil.rmtree("/tmp/sg_logs")
        print("sync_gateway logs copied here {}\n".format(name))
        zip_file_path = "{}.zip".format(name)
        if is_perf_run:
            # Move perf logs to performance_results
            shutil.copy(zip_file_path, "performance_results/{}/".format(prefix))
        print("\n")
        return zip_file_path
示例4: Craeate_addon_from_github
def Craeate_addon_from_github(URL,local_repo_folder):
    """Download a Kodi addon's master branch from GitHub and repackage it as a
    versioned zip inside the local repository folder.

    (The misspelling "Craeate" is kept: renaming would break callers.)

    :param URL: GitHub project URL, e.g. ``https://github.com/user/addon``.
    :param local_repo_folder: root folder of the local addon repository.
    """
    archive_suffix="/archive/master.zip"
    print(URL)
    # The last path component of the URL is used as the addon folder name.
    addonname=URL.strip('/').split('/')[-1]
    if not os.path.exists(local_repo_folder+os.sep+addonname):
        print("Making folder for addon in repo: ",addonname)
        os.makedirs(local_repo_folder+os.sep+addonname)
    download_file(URL+archive_suffix,local_repo_folder+os.sep+addonname+os.sep+"master.zip")
    try:
        # zipfilehandler is expected to return the addon.xml text found in
        # the downloaded archive plus the archive's type.
        xml_frm_file,ziptype=zipfilehandler(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
    except Exception as e:
        print("cannot create a zip from githuburl ",URL)
        return
    root = ET.fromstring(xml_frm_file)
    for element in root.iter("addon"):
        # NOTE(review): addon_name (the id from addon.xml) and addonname (from
        # the URL) are mixed below -- the make_archive/rmtree pair uses
        # addon_name for the repo folder while every other path uses addonname.
        # They are only equivalent when the GitHub repo is named after the
        # addon id; confirm, this looks like a latent bug otherwise.
        addon_name=element.attrib['id']
        addon_version=element.attrib['version']
        try:
            currntzip=zipfile.ZipFile(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
            currntzip.extractall(local_repo_folder+os.sep+addonname+os.sep)
            currntzip.close()
            # GitHub archives unpack to "<id>-master"; rename to the plain id.
            shutil.move(local_repo_folder+os.sep+addonname+os.sep+addon_name+"-master",local_repo_folder+os.sep+addonname+os.sep+addon_name)
            os.remove(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
            shutil.make_archive(local_repo_folder+os.sep+addon_name+os.sep+addon_name+"-"+addon_version,'zip',local_repo_folder+os.sep+addon_name,addon_name)
            shutil.rmtree(local_repo_folder+os.sep+addonname+os.sep+addon_name)
        except Exception as e:
            print("could not save fil ",addonname)
示例5: build_windows
def build_windows():
    """Build windows executables/setups.

    Updates third-party assets, runs PyInstaller for 64-bit Windows,
    smoke-tests the result, builds the NSIS installer and a standalone
    zip, and returns the produced artifacts as
    (path, mimetype, description) tuples.
    """
    utils.print_title("Updating 3rdparty content")
    update_3rdparty.run(ace=False, pdfjs=True, fancy_dmg=False)
    utils.print_title("Building Windows binaries")
    parts = str(sys.version_info.major), str(sys.version_info.minor)
    ver = ''.join(parts)
    dot_ver = '.'.join(parts)
    # Get python path from registry if possible
    try:
        reg64_key = winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE,
                                     r'SOFTWARE\Python\PythonCore'
                                     r'\{}\InstallPath'.format(dot_ver))
        python_x64 = winreg.QueryValueEx(reg64_key, 'ExecutablePath')[0]
    except FileNotFoundError:
        # Fall back to the conventional install path, e.g. C:\Python39\python.exe.
        python_x64 = r'C:\Python{}\python.exe'.format(ver)
    out_pyinstaller = os.path.join('dist', 'qutebrowser')
    out_64 = os.path.join('dist',
                          'qutebrowser-{}-x64'.format(qutebrowser.__version__))
    artifacts = []
    # NOTE(review): imported here rather than at module level -- presumably to
    # defer its import-time cost/side effects; confirm before hoisting.
    from scripts.dev import gen_versioninfo
    utils.print_title("Updating VersionInfo file")
    gen_versioninfo.main()
    utils.print_title("Running pyinstaller 64bit")
    _maybe_remove(out_64)
    call_tox('pyinstaller', '-r', python=python_x64)
    # PyInstaller always writes to dist/qutebrowser; rename to the versioned dir.
    shutil.move(out_pyinstaller, out_64)
    utils.print_title("Running 64bit smoke test")
    smoke_test(os.path.join(out_64, 'qutebrowser.exe'))
    utils.print_title("Building installers")
    subprocess.run(['makensis.exe',
                    '/DX64',
                    '/DVERSION={}'.format(qutebrowser.__version__),
                    'misc/qutebrowser.nsi'], check=True)
    name_64 = 'qutebrowser-{}-amd64.exe'.format(qutebrowser.__version__)
    artifacts += [
        (os.path.join('dist', name_64),
         'application/vnd.microsoft.portable-executable',
         'Windows 64bit installer'),
    ]
    utils.print_title("Zipping 64bit standalone...")
    name = 'qutebrowser-{}-windows-standalone-amd64'.format(
        qutebrowser.__version__)
    shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_64))
    artifacts.append(('{}.zip'.format(name),
                      'application/zip',
                      'Windows 64bit standalone'))
    return artifacts
示例6: ziptex
def ziptex(self):
    """Populate a fresh copy of the LaTeX skeleton with this document's
    commands, packages, counters and chapter content, zip it under a
    random UUID name, and move the archive to the public www directory."""
    skeleton_dir = os.path.join(WD,'skeleton')
    # Always start from a pristine copy of the shared skeleton template.
    try:
        shutil.rmtree(skeleton_dir)
    except OSError:
        pass
    shutil.copytree(lspskeletond, skeleton_dir)
    os.chdir(skeleton_dir)
    with codecs.open('localcommands.tex','a', encoding='utf-8') as fh:
        fh.write(self.commands)
        fh.write(self.environments)
    with codecs.open('localpackages.tex','a', encoding='utf-8') as fh:
        fh.write(self.packages)
    with codecs.open('localcounters.tex','a', encoding='utf-8') as fh:
        fh.write(self.counters)
    with codecs.open('chapters/filename.tex','w', encoding='utf-8') as fh:
        fh.write(self.modtext)
    with codecs.open('chapters/filenameorig.tex','w', encoding='utf-8') as fh:
        fh.write(self.text)
    os.chdir(WD)
    # A random UUID keeps concurrent exports from clobbering each other.
    self.zipfn = str(uuid.uuid4())
    shutil.make_archive(self.zipfn, 'zip', skeleton_dir)
    shutil.move(self.zipfn+'.zip',wwwdir)
示例7: zip_dicom
def zip_dicom(directory):
    """Compress a DICOM directory into ``<directory>.zip`` and delete the
    original directory on success.

    :param directory: path to the directory to zip
    :type directory: str
    :return: archive -> path to the created zip file
    :rtype: str
    """
    archive = directory + '.zip'
    # Refuse to produce an empty archive.
    if not os.listdir(directory):
        sys.exit(
            "The directory " + directory + " is empty and will not be zipped.")
    shutil.make_archive(directory, 'zip', directory)
    if not os.path.exists(archive):
        sys.exit(archive + " could not be created.")
    # Only remove the source once the archive is confirmed on disk.
    shutil.rmtree(directory)
    return archive
示例8: zip_directory
def zip_directory(manga, chapter_num, url):
    """Zip one chapter's upload directory and place the archive inside it.

    The archive is first written to the parent directory so the zip being
    created is never swept into itself, then moved into the chapter folder.
    """
    src_dir = application.config['UPLOAD_FOLDER'] + url + "/"
    zip_name = make_zip_filename(manga, chapter_num)
    # To make sure there is no recursive compression
    staging_base = src_dir + "../" + zip_name
    shutil.make_archive(staging_base, "zip", src_dir)
    shutil.move(staging_base + ".zip", src_dir + zip_name + ".zip")
示例9: test_make_archive_owner_group
def test_make_archive_owner_group(self):
    # testing make_archive with owner and group, with various combinations:
    # both given, neither given, and bogus names. This works even when
    # there is no gid/uid support on the platform.
    if UID_GID_SUPPORT:
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
    else:
        group = owner = 'root'
    base_dir, root_dir, base_name = self._create_files()
    base_name = os.path.join(self.mkdtemp() , 'archive')
    # Table of (format, owner/group kwargs) combinations to exercise.
    cases = (
        ('zip', {'owner': owner, 'group': group}),
        ('zip', {}),
        ('tar', {'owner': owner, 'group': group}),
        ('tar', {'owner': 'kjhkjhkjg', 'group': 'oihohoh'}),
    )
    for fmt, kwargs in cases:
        res = make_archive(base_name, fmt, root_dir, base_dir, **kwargs)
        self.assertTrue(os.path.exists(res))
示例10: test_make_tarball
def test_make_tarball(self):
    # creating something to tar
    root_dir, base_dir = self._create_files('')
    tmpdir2 = self.mkdtemp()
    # force shutil to create the directory
    os.rmdir(tmpdir2)
    # working with relative paths
    work_dir = os.path.dirname(tmpdir2)
    rel_base_name = os.path.join(os.path.basename(tmpdir2), 'archive')
    # Same member list is expected for both archive formats.
    expected_names = ['.', './file1', './file2',
                      './sub', './sub/file3', './sub2']
    # First a compressed tarball, then an uncompressed one.
    for fmt, suffix, read_mode in (('gztar', '.tar.gz', 'r:gz'),
                                   ('tar', '.tar', 'r')):
        with support.change_cwd(work_dir):
            base_name = os.path.abspath(rel_base_name)
            tarball = make_archive(rel_base_name, fmt, root_dir, '.')
        # check that the tarball was created with the right name
        self.assertEqual(tarball, base_name + suffix)
        self.assertTrue(os.path.isfile(tarball))
        self.assertTrue(tarfile.is_tarfile(tarball))
        with tarfile.open(tarball, read_mode) as tf:
            self.assertEqual(sorted(tf.getnames()), expected_names)
示例11: test_tarfile_vs_tar
def test_tarfile_vs_tar(self):
    # Archives produced by make_archive and by the system `tar` command
    # must contain the same members.
    root_dir, base_dir = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    compressed = make_archive(base_name, 'gztar', root_dir, base_dir)
    # check if the compressed tarball was created
    self.assertEqual(compressed, base_name + '.tar.gz')
    self.assertTrue(os.path.isfile(compressed))
    # now create another tarball using `tar`
    tarball2 = os.path.join(root_dir, 'archive2.tar')
    subprocess.check_call(['tar', '-cf', 'archive2.tar', base_dir],
                          cwd=root_dir)
    self.assertTrue(os.path.isfile(tarball2))
    # let's compare both tarballs
    self.assertEqual(self._tarinfo(compressed), self._tarinfo(tarball2))
    # trying an uncompressed one
    plain = make_archive(base_name, 'tar', root_dir, base_dir)
    self.assertEqual(plain, base_name + '.tar')
    self.assertTrue(os.path.isfile(plain))
    # now for a dry_run: the name is returned but the existing file is kept
    dry = make_archive(base_name, 'tar', root_dir, base_dir,
                       dry_run=True)
    self.assertEqual(dry, base_name + '.tar')
    self.assertTrue(os.path.isfile(dry))
示例12: domove
def domove(self, current):
    """Archive the entry named *current* from self.dir_list into save_root.

    dir_list[current] is a sequence where [0] is the source path, [2] is
    the is-directory flag and [3] is the clobber policy: 'yes' overwrites
    the same archive name, anything else appends a timestamp so old
    archives are preserved.
    """
    entry = self.dir_list[current]
    if entry[3] == 'yes':
        # clobber: reuse the same archive name every time
        suffix = ''
    else:
        # noclobber: ISO timestamp without sub-seconds, colons stripped so
        # the result is a valid filename component on all platforms.
        suffix = datetime.datetime.now().isoformat().split('.')[0].replace(':', "")
    if entry[2] == True:
        # Directory: shutil.make_archive appends '.zip' itself, so the base
        # name must NOT include the extension (the previous code passed
        # '<name>.zip' and produced '<name>.zip.zip').
        shutil.make_archive(save_root + current + suffix, "zip", entry[0])
    else:
        # Single file: store it in a one-entry zip; the context manager
        # releases the handle even if write() raises.
        with zipfile.ZipFile(save_root + current + suffix + '.zip', 'w') as zf:
            zf.write(entry[0])
    return
示例13: main
def main():
    """Build the extension: copy sources, compile JS, and zip the result."""
    parse_command_line()
    version = get_version()
    # Canary/debug builds carry distinguishing suffixes in the folder name.
    suffixes = []
    if not stable_build:
        suffixes.append('-canary')
    if debug_build:
        suffixes.append('-dbg')
    dir_name = APP_NAME + '-' + version + ''.join(suffixes)
    print(dir_name)
    out_dir = os.path.join(BUILD_DIR, dir_name)
    archive_path = out_dir + '.zip'
    # Start from a clean slate: drop any previous build output and archive.
    delete(out_dir, archive_path)
    copy_files(SOURCE_DIR, out_dir, FILES_TO_COPY)
    # Manifest processing yields the list of background scripts to compile.
    background_js_files = process_manifest(out_dir, version)
    compile_js(os.path.join(out_dir, 'js', 'background.js'),
               background_js_files,
               BACKGROUND_COMPILATION_LEVEL,
               BACKGROUND_EXTERNS)
    js_files = process_index(out_dir)
    compile_js(os.path.join(out_dir, TARGET_JS),
               js_files,
               COMPILATION_LEVEL,
               JS_EXTERNS)
    print('Archiving', archive_path)
    # base_dir keeps the versioned folder as the single top-level zip entry.
    shutil.make_archive(out_dir, 'zip',
                        root_dir=os.path.abspath(BUILD_DIR),
                        base_dir=dir_name,
                        verbose=True)
示例14: _backup_linear_log
def _backup_linear_log(self, fs_now, current_work_dir, backup_format, backup_work_dir, backup_history):
    """Archive *current_work_dir* into *backup_work_dir* as one entry of a
    rolling, linearly numbered backup sequence.

    :param fs_now: filesystem-safe timestamp used in the archive name.
    :param current_work_dir: directory whose contents are archived.
    :param backup_format: format passed to shutil.make_archive
        ('zip', 'tar', 'gztar', ...).
    :param backup_work_dir: directory holding the backup archives.
    :param backup_history: requested number of backups to retain
        (capped at MAX_BACKUPS).
    """
    delete_previous_backups = False
    # Aren't we going to exceed the limit?
    max_backups = min(backup_history, MAX_BACKUPS)
    backup_contents = []
    for item in os.listdir(backup_work_dir):
        item = os.path.join(backup_work_dir, item)
        # We tally files only and assume each file must be one of ours so we can safely drop it if need be
        if os.path.isfile(item):
            backup_contents.append(item)
    len_backups = len(backup_contents)
    # It's the first backup or we're past the limit so we need a fresh prefix
    if not len_backups or len_backups >= max_backups:
        next_prefix = _first_prefix
    else:
        # Zero-padded to the width of _first_prefix so names sort lexically.
        next_prefix = str(len_backups).zfill(len(_first_prefix))
    # Also, we need to delete previous backups if we're starting anew
    if len_backups >= max_backups:
        delete_previous_backups = True
    backup_name = '{}-{}'.format(next_prefix, fs_now)
    backup_path = os.path.join(backup_work_dir, backup_name)
    # Create the new archive first so the old generation is only removed
    # once the new one exists.
    shutil.make_archive(backup_path, backup_format, current_work_dir, verbose=True, logger=self.logger)
    if delete_previous_backups:
        self._delete(backup_contents)
示例15: config
def config():
    """Back up the Canopsis configuration: record the installed package list
    into ``etc/.packages``, copy the ``etc`` tree and zip it into backup_path."""
    logger = logging.getLogger()
    logger.debug('Config Backup start:')
    archive_name = 'backup_config'
    tmp_dir = '%s/%s' % (backup_path, archive_name)
    if os.path.exists(tmp_dir):
        logger.debug(' + Remove old temp dir')
        shutil.rmtree(tmp_dir)
    logger.debug(' + List all packages')
    # One package name per line, no trailing newline (same output as the
    # previous interleaved-'\n' writelines logic, minus the handle leak:
    # the with-block closes the file even if a write raises).
    names = [package.name for package in ubik_api.db.get_installed()]
    with open('%s/etc/.packages' % home_path, 'w') as f:
        f.write('\n'.join(names))
    logger.debug(' + Copy config files')
    shutil.copytree('%s/etc' % home_path, '%s/' % tmp_dir)
    logger.debug(' + Make archive')
    shutil.make_archive('%s/%s' % (backup_path, archive_name), 'zip', tmp_dir)
    logger.debug(' + Remove temp dir')
    shutil.rmtree(tmp_dir)
    logger.debug('Config Backup finished')