This article collects typical usage examples of the Python function shared.try_delete. If you are unsure what try_delete does, how to call it, or where it is normally used, the curated examples below should help.
The following presents 15 code examples of try_delete, roughly ordered by popularity.
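All of the examples are drawn from Emscripten's build and test tooling. For context, callers treat try_delete as a best-effort delete: they invoke it before regenerating an output file or after discarding a stale download, and they never handle an exception from it. Below is a minimal sketch of such a helper, written as an assumption based on how the call sites use it, not as the library's exact implementation:

import os
import shutil

def try_delete(pathname):
  # best-effort delete: try removing a plain file first...
  try:
    os.unlink(pathname)
  except OSError:
    # ...then fall back to removing a directory tree;
    # ignore_errors makes a missing path a no-op
    shutil.rmtree(pathname, ignore_errors=True)

Typical call sites below follow the same pattern: shared.try_delete(filename + '.js') before compiling, or shared.try_delete(output) before rebuilding a cached binary.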
Example 1: try_js
def try_js(args=[]):
  shared.try_delete(filename + '.js')
  js_args = [shared.PYTHON, shared.EMCC, opts] + llvm_opts + [fullname, '-o', filename + '.js'] + CSMITH_CFLAGS + args + ['-w']
  if random.random() < 0.5:
    js_args += ['-s', 'ALLOW_MEMORY_GROWTH=1']
  if random.random() < 0.5 and 'ALLOW_MEMORY_GROWTH=1' not in js_args:
    js_args += ['-s', 'MAIN_MODULE=1']
  if random.random() < 0.25:
    js_args += ['-s', 'INLINING_LIMIT=1'] # inline nothing, for more call interaction
  if random.random() < 0.333:
    js_args += ['-s', 'EMTERPRETIFY=1']
    if random.random() < 0.5:
      if random.random() < 0.5:
        js_args += ['-s', 'EMTERPRETIFY_BLACKLIST=["_main"]'] # blacklist main and all inlined into it, but interpret the rest, tests mixing
      else:
        js_args += ['-s', 'EMTERPRETIFY_WHITELIST=["_main"]'] # the opposite direction
    if random.random() < 0.5:
      js_args += ['-s', 'EMTERPRETIFY_ASYNC=1']
  if random.random() < 0.5:
    js_args += ["--memory-init-file", "0", "-s", "MEM_INIT_METHOD=2"]
  if random.random() < 0.5:
    js_args += ['-s', 'ASSERTIONS=1']
  print '(compile)', ' '.join(js_args)
  open(fullname, 'a').write('\n// ' + ' '.join(js_args) + '\n\n')
  try:
    shared.check_execute(js_args)
    assert os.path.exists(filename + '.js')
    return js_args
  except:
    return False
Example 2: try_js
def try_js(args):
  shared.try_delete(filename + '.js')
  print '(compile)'
  shared.check_execute([shared.PYTHON, shared.EMCC, opts, fullname, '-o', filename + '.js'] + CSMITH_CFLAGS + args)
  assert os.path.exists(filename + '.js')
  print '(run)'
  js = shared.run_js(filename + '.js', engine=engine1, check_timeout=True, assert_returncode=None, cwd='/tmp/emscripten_temp')
  assert correct1 == js or correct2 == js, ''.join([a.rstrip()+'\n' for a in difflib.unified_diff(correct1.split('\n'), js.split('\n'), fromfile='expected', tofile='actual')])
Example 3: try_js
def try_js(args):
  shared.try_delete(filename + '.js')
  print '(compile)'
  shared.execute([shared.EMCC, '-O2', '-s', 'ASM_JS=1', filename + '.c', '-o', filename + '.js'] + CSMITH_CFLAGS + args, stderr=PIPE)
  assert os.path.exists(filename + '.js')
  print '(run)'
  js = shared.run_js(filename + '.js', stderr=PIPE, engine=engine1, check_timeout=True)
  assert correct1 == js or correct2 == js, ''.join([a.rstrip()+'\n' for a in difflib.unified_diff(correct1.split('\n'), js.split('\n'), fromfile='expected', tofile='actual')])
Example 4: create_load_wasm_worker
def create_load_wasm_worker():
  emscripten.logging.debug('building load-wasm-worker')
  output = emscripten.Cache.get_path('load-wasm-worker.js')
  emscripten.try_delete(output)
  check_call([PYTHON, emscripten.EMCC, emscripten.path_from_root('third_party', 'wasm-polyfill', 'src', 'unpack.cpp'),
              emscripten.path_from_root('tools', 'optimizer', 'parser.cpp'),
              '-o', output] + \
             '-O3 -std=c++11 --memory-init-file 0 --llvm-lto 1 -s TOTAL_MEMORY=67108864 -s WASM=0'.split(' '))
  assert os.path.exists(output)
  open(output, 'a').write(open(emscripten.path_from_root('third_party', 'wasm-polyfill', 'src', 'load-wasm-worker.js')).read())
  return output
Example 5: create_optimizer
def create_optimizer():
  shared.logging.debug('building native optimizer')
  output = shared.Cache.get_path('optimizer.exe')
  shared.try_delete(output)
  errs = []
  for compiler in [shared.CLANG, 'g++', 'clang++']: # try our clang first, otherwise hope for a system compiler in the path
    shared.logging.debug(' using ' + compiler)
    out, err = subprocess.Popen([compiler, shared.path_from_root('tools', 'optimizer', 'optimizer.cpp'), '-O3', '-std=c++11', '-fno-exceptions', '-fno-rtti', '-o', output], stderr=subprocess.PIPE).communicate()
    # for profiling/debugging: '-g', '-fno-omit-frame-pointer'
    if os.path.exists(output): return output
    errs.append(err)
  raise Exception('failed to build native optimizer, errors from each attempt: ' + '\n=================\n'.join(errs))
Example 6: create_pack_asmjs
def create_pack_asmjs():
  emscripten.logging.debug('building pack-asmjs')
  output = emscripten.Cache.get_path('pack-asmjs.js')
  emscripten.try_delete(output)
  check_call([PYTHON, emscripten.EMCC, emscripten.path_from_root('third_party', 'wasm-polyfill', 'src', 'pack-asmjs.cpp'),
              emscripten.path_from_root('third_party', 'wasm-polyfill', 'src', 'unpack.cpp'),
              emscripten.path_from_root('tools', 'optimizer', 'parser.cpp'),
              '-o', output] + \
             '-O3 -std=c++11 -DCHECKED_OUTPUT_SIZE --memory-init-file 0 --llvm-lto 1 -s TOTAL_MEMORY=67108864 -s WASM=0 -s INVOKE_RUN=0'.split(' ') + \
             ['-I' + emscripten.path_from_root('tools', 'optimizer')])
  assert os.path.exists(output)
  open(output, 'a').write(open(emscripten.path_from_root('third_party', 'wasm-polyfill', 'src', 'pack-asmjs.js')).read())
  return output
Example 7: create_optimizer
def create_optimizer():
  shared.logging.debug('building native optimizer: ' + name)
  output = shared.Cache.get_path(name)
  shared.try_delete(output)
  for compiler in [shared.CLANG, 'g++', 'clang++']: # try our clang first, otherwise hope for a system compiler in the path
    shared.logging.debug(' using ' + compiler)
    try:
      subprocess.Popen([compiler,
                        shared.path_from_root('tools', 'optimizer', 'parser.cpp'),
                        shared.path_from_root('tools', 'optimizer', 'simple_ast.cpp'),
                        shared.path_from_root('tools', 'optimizer', 'optimizer.cpp'),
                        '-O3', '-std=c++11', '-fno-exceptions', '-fno-rtti', '-o', output] + args).communicate()
    except OSError:
      if compiler == shared.CLANG: raise # otherwise, OSError is likely due to g++ or clang++ not being in the path
    if os.path.exists(output): return output
  raise NativeOptimizerCreationException()
Example 8: try_js
def try_js(args):
    shared.try_delete(filename + ".js")
    print "(compile)"
    shared.execute(
        [shared.EMCC, "-O2", "-s", "ASM_JS=1", filename + ".c", "-o", filename + ".js"] + CSMITH_CFLAGS + args,
        stderr=PIPE,
    )
    assert os.path.exists(filename + ".js")
    print "(run)"
    js = shared.run_js(filename + ".js", stderr=PIPE, engine=engine1, check_timeout=True)
    assert correct1 == js or correct2 == js, "".join(
        [
            a.rstrip() + "\n"
            for a in difflib.unified_diff(
                correct1.split("\n"), js.split("\n"), fromfile="expected", tofile="actual"
            )
        ]
    )
Example 9: create_optimizer
def create_optimizer():
    shared.logging.debug("building native optimizer: " + name)
    output = shared.Cache.get_path(name)
    shared.try_delete(output)
    for compiler in [
        shared.CLANG,
        "g++",
        "clang++",
    ]:  # try our clang first, otherwise hope for a system compiler in the path
        shared.logging.debug(" using " + compiler)
        try:
            out, err = subprocess.Popen(
                [
                    compiler,
                    shared.path_from_root("tools", "optimizer", "parser.cpp"),
                    shared.path_from_root("tools", "optimizer", "simple_ast.cpp"),
                    shared.path_from_root("tools", "optimizer", "optimizer.cpp"),
                    shared.path_from_root("tools", "optimizer", "optimizer-main.cpp"),
                    "-O3",
                    "-std=c++11",
                    "-fno-exceptions",
                    "-fno-rtti",
                    "-o",
                    output,
                ]
                + args,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            ).communicate()
            outs.append(out)
            errs.append(err)
        except OSError:
            if compiler == shared.CLANG:
                raise  # otherwise, OSError is likely due to g++ or clang++ not being in the path
        if os.path.exists(output):
            return output
    raise NativeOptimizerCreationException()
Example 10: create_optimizer_cmake
def create_optimizer_cmake():
  shared.logging.debug('building native optimizer via CMake: ' + name)
  output = shared.Cache.get_path(name)
  shared.try_delete(output)
  if NATIVE_OPTIMIZER == '1':
    cmake_build_type = 'RelWithDebInfo'
  elif NATIVE_OPTIMIZER == '2':
    cmake_build_type = 'Release'
  elif NATIVE_OPTIMIZER == 'g':
    cmake_build_type = 'Debug'
  build_path = shared.Cache.get_path('optimizer_build_' + cmake_build_type)
  shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))
  log_output = None if DEBUG else subprocess.PIPE
  if not os.path.exists(build_path):
    os.mkdir(build_path)
  if WINDOWS:
    # Poor man's check for whether or not we should attempt 64 bit build
    if os.environ.get('ProgramFiles(x86)'):
      cmake_generators = ['Visual Studio 12 Win64', 'Visual Studio 12', 'Visual Studio 11 Win64', 'Visual Studio 11', 'MinGW Makefiles', 'Unix Makefiles']
    else:
      cmake_generators = ['Visual Studio 12', 'Visual Studio 11', 'MinGW Makefiles', 'Unix Makefiles']
  else:
    cmake_generators = ['Unix Makefiles']
  for cmake_generator in cmake_generators:
    proc = subprocess.Popen(['cmake', '-G', cmake_generator, '-DCMAKE_BUILD_TYPE='+cmake_build_type, shared.path_from_root('tools', 'optimizer')], cwd=build_path, stdin=log_output, stdout=log_output, stderr=log_output)
    proc.communicate()
    make_env = os.environ.copy()
    if proc.returncode == 0:
      if 'Visual Studio' in cmake_generator:
        ret = find_msbuild(os.path.join(build_path, 'asmjs_optimizer.sln'), make_env)
        make = [ret[0], '/t:Build', '/p:Configuration='+cmake_build_type, '/nologo', '/verbosity:minimal', 'asmjs_optimizer.sln']
        make_env = ret[1]
      elif 'MinGW' in cmake_generator:
        make = ['mingw32-make']
      else:
        make = ['make']
      proc = subprocess.Popen(make, cwd=build_path, stdin=log_output, stdout=log_output, stderr=log_output, env=make_env)
      proc.communicate()
      if proc.returncode == 0:
        if WINDOWS and 'Visual Studio' in cmake_generator:
          shutil.copyfile(os.path.join(build_path, cmake_build_type, 'optimizer.exe'), output)
        else:
          shutil.copyfile(os.path.join(build_path, 'optimizer'), output)
        return output
      else:
        shared.try_delete(os.path.join(build_path, 'CMakeCache.txt'))
        # Proceed to next iteration of the loop to try next possible CMake generator.
  raise NativeOptimizerCreationException()
Example 11: relocate_into
def relocate_into(self, main):
  # heap initializer
  if self.staticbump > 0:
    new_mem_init = self.mem_init_js[:self.mem_init_js.rfind(', ')] + ', Runtime.GLOBAL_BASE+%d)' % main.staticbump
    main.pre_js = re.sub(shared.JS.memory_staticbump_pattern, 'STATICTOP = STATIC_BASE + %d;\n' % (main.staticbump + self.staticbump) + new_mem_init, main.pre_js, count=1)
  # Find function name replacements TODO: do not rename duplicate names with duplicate contents, just merge them
  replacements = {}
  for func in self.funcs:
    rep = func
    while rep in main.funcs:
      rep += '_'
      replacements[func] = rep
  #print >> sys.stderr, 'replacements:', replacements
  # sendings: add invokes for new tables
  all_sendings = main.sendings
  added_sending = False
  for table in self.tables:
    if table not in main.tables:
      sig = table[table.rfind('_')+1:]
      func = 'invoke_%s' % sig
      all_sendings[func] = func
      main.pre_js += 'var %s = %s;\n' % (func, shared.JS.make_invoke(sig, named=False))
      added_sending = True
  # imports
  all_imports = main.imports
  for key, value in self.imports.iteritems():
    if key in self.funcs or key in main.funcs: continue # external function in one module, implemented in the other
    value_concrete = '.' not in value # env.key means it is an import, an external value, and not a concrete one
    main_value = main.imports.get(key)
    main_value_concrete = main_value and '.' not in main_value
    if value_concrete and main_value_concrete: continue # standard global var
    if not main_value or value_concrete:
      if '+' in value:
        # relocate
        value = value.replace('(', '').replace(')', '').replace('| 0', '').replace('|0', '').replace(' ', '')
        left, right = value.split('+')
        assert left == 'H_BASE'
        value = str(main.staticbump + int(right))
      all_imports[key] = value
    if (value_concrete or main_value_concrete) and key in all_sendings:
      del all_sendings[key] # import of external value no longer needed
  main.imports_js = '\n'.join(['var %s = %s;' % (key, value) for key, value in all_imports.iteritems()]) + '\n'
  # check for undefined references to global variables
  def check_import(key, value):
    if value.startswith('+') or value.endswith('|0'): # ignore functions
      if key not in all_sendings:
        print >> sys.stderr, 'warning: external variable %s is still not defined after linking' % key
        all_sendings[key] = '0'
  for key, value in all_imports.iteritems(): check_import(key, value)
  if added_sending:
    sendings_js = ', '.join(['%s: %s' % (key, value) for key, value in all_sendings.iteritems()])
    sendings_start = main.post_js.find('}, { ')+5
    sendings_end = main.post_js.find(' }, buffer);')
    main.post_js = main.post_js[:sendings_start] + sendings_js + main.post_js[sendings_end:]
  # tables
  f_bases = {}
  f_sizes = {}
  for table, data in self.tables.iteritems():
    main.tables[table] = self.merge_tables(table, main.tables.get(table), data, replacements, f_bases, f_sizes)
  main.combine_tables()
  #print >> sys.stderr, 'f bases', f_bases
  # relocate
  temp = shared.Building.js_optimizer(self.filename, ['asm', 'relocate', 'last'], extra_info={
    'replacements': replacements,
    'fBases': f_bases,
    'hBase': main.staticbump
  })
  #print >> sys.stderr, 'relocated side into', temp
  relocated_funcs = AsmModule(temp)
  shared.try_delete(temp)
  main.extra_funcs_js = relocated_funcs.funcs_js.replace(js_optimizer.start_funcs_marker, '\n')
  # update function table uses
  ft_marker = 'FUNCTION_TABLE_'
  def update_fts(what):
    updates = []
    i = 1 # avoid seeing marker in recursion
    while 1:
      i = what.find(ft_marker, i)
      if i < 0: break
      start = i
      end = what.find('[', start)
      table = what[i:end]
      if table not in f_sizes:
        # table was not modified
        i += len(ft_marker)
        continue
      nesting = 1
      while nesting > 0:
        next = what.find(']', end+1)
        nesting -= 1
        nesting += what.count('[', end+1, next)
#.........(the rest of the code is omitted here).........
Example 12: clear_project_build
def clear_project_build(name):
  shared.try_delete(os.path.join(Ports.get_build_dir(), name))
  shared.try_delete(shared.Cache.get_path(name + '.bc'))
Example 13: fetch_project
def fetch_project(name, url, expected_version):
  fullname = os.path.join(Ports.get_dir(), name)
  if name not in Ports.name_cache: # only mention each port once in log
    logging.warning('including port: ' + name)
    logging.debug(' (at ' + fullname + ')')
    Ports.name_cache.add(name)
  class State:
    retrieved = False
    unpacked = False
  def retrieve():
    logging.warning('retrieving port: ' + name + ' from ' + url)
    import urllib2
    f = urllib2.urlopen(url)
    data = f.read()
    open(fullname + '.zip', 'wb').write(data)
    State.retrieved = True
  def unpack():
    logging.warning('unpacking port: ' + name)
    import zipfile
    shared.safe_ensure_dirs(fullname)
    z = zipfile.ZipFile(fullname + '.zip', 'r')
    try:
      cwd = os.getcwd()
      os.chdir(fullname)
      z.extractall()
    finally:
      os.chdir(cwd)
    State.unpacked = True
  def check_version(expected_version):
    try:
      ok = False
      if not os.path.exists(fullname): return False
      subdir = os.listdir(fullname)
      if len(subdir) != 1: return False
      subdir = subdir[0] # each port has a singleton subdir
      f = os.path.join(fullname, subdir, 'version.txt')
      if not os.path.exists(f): return False # no version, need an update
      version = open(f).read()
      version = int(version)
      ok = True
    finally:
      if not ok: logging.error('error when checking port version for ' + name)
    return version >= expected_version
  # main logic
  if not os.path.exists(fullname + '.zip'):
    retrieve()
  if not os.path.exists(fullname):
    unpack()
  if not check_version(expected_version):
    # fetch a newer version
    assert not State.retrieved, 'just retrieved port ' + name + ', but not a new enough version?'
    shared.try_delete(fullname)
    shared.try_delete(fullname + '.zip')
    retrieve()
    unpack()
    assert check_version(expected_version), 'just retrieved replacement port ' + name + ', but not a new enough version?'
  if State.unpacked:
    # we unpacked a new version, clear the build in the cache
    Ports.clear_project_build(name)
Example 14: erase
def erase():
  shared.try_delete(Ports.get_dir())
Example 15: fetch_project
def fetch_project(name, url, subdir):
  fullname = os.path.join(Ports.get_dir(), name)
  if name not in Ports.name_cache: # only mention each port once in log
    logging.debug('including port: ' + name)
    logging.debug(' (at ' + fullname + ')')
    Ports.name_cache.add(name)
  class State:
    retrieved = False
    unpacked = False
  def retrieve():
    # if EMCC_LOCAL_PORTS is set, we use a local directory as our ports. This is useful
    # for testing. This env var should be in format
    #   name=dir|tag,name=dir|tag
    # e.g.
    #   sdl2=/home/username/dev/ports/SDL2|SDL2-master
    # so you could run
    #   EMCC_LOCAL_PORTS="sdl2=/home/alon/Dev/ports/SDL2|SDL2-master" ./tests/runner.py browser.test_sdl2_mouse
    # note that tag **must** be the tag in sdl.py, it is where we store to (not where we load from, we just load the local dir)
    local_ports = os.environ.get('EMCC_LOCAL_PORTS')
    if local_ports:
      local_ports = map(lambda pair: pair.split('='), local_ports.split(','))
      for local in local_ports:
        if name == local[0]:
          path, subdir = local[1].split('|')
          logging.warning('grabbing local port: ' + name + ' from ' + path + ', into ' + subdir)
          # zip up the directory, so it looks the same as if we downloaded a zip from the remote server
          z = zipfile.ZipFile(fullname + '.zip', 'w')
          def add_dir(p):
            for f in os.listdir(p):
              full = os.path.join(p, f)
              if os.path.isdir(full):
                add_dir(full)
              else:
                if not f.startswith('.'): # ignore hidden files, including .git/ etc.
                  z.write(full, os.path.join(subdir, os.path.relpath(full, path)))
          add_dir(path)
          z.close()
          State.retrieved = True
          return
    # retrieve from remote server
    logging.warning('retrieving port: ' + name + ' from ' + url)
    import urllib2
    f = urllib2.urlopen(url)
    data = f.read()
    open(fullname + '.zip', 'wb').write(data)
    State.retrieved = True
  def check_tag():
    z = zipfile.ZipFile(fullname + '.zip', 'r')
    names = z.namelist()
    if not (names[0].startswith(subdir + '/') or names[0].startswith(subdir + '\\')):
      # current zip file is old, force a retrieve
      return False
    return True
  def unpack():
    logging.warning('unpacking port: ' + name)
    shared.safe_ensure_dirs(fullname)
    z = zipfile.ZipFile(fullname + '.zip', 'r')
    try:
      cwd = os.getcwd()
      os.chdir(fullname)
      z.extractall()
    finally:
      os.chdir(cwd)
    State.unpacked = True
  # main logic
  if not os.path.exists(fullname + '.zip'):
    retrieve()
  if not os.path.exists(fullname):
    unpack()
  if not check_tag():
    logging.warning('local copy of port is not correct, retrieving from remote server')
    shared.try_delete(fullname)
    shared.try_delete(fullname + '.zip')
    retrieve()
    unpack()
  if State.unpacked:
    # we unpacked a new version, clear the build in the cache
    Ports.clear_project_build(name)