This article collects typical usage examples of the os.sys.exit method in Python. If you have been struggling with questions such as: how exactly is Python's sys.exit used, how do you call it, and what does real sys.exit code look like, then the curated code examples below may help. You can also read further about os.sys, the module this method is reached through.
Below, 14 code examples of the sys.exit method are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
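Before diving into the examples, here is a minimal, self-contained sketch of the import style they share: sys reached through the os module (which simply re-exports the standard sys module it imports internally) and sys.exit() used to end the process with a status code. The script name and argument handling below are illustrative only.

# Minimal sketch: exiting a script with a status code via the sys module
# reached through os. "from os import sys" works because the os module
# happens to expose its own imported sys; plain "import sys" is equivalent.
from os import sys

def main():
    if len(sys.argv) < 2:
        print('usage: example.py <input-file>')
        sys.exit(1)  # non-zero status tells the calling shell the run failed
    print('processing ' + sys.argv[1])
    sys.exit(0)      # explicit success status (0 is also the default)

if __name__ == '__main__':
    main()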
Example 1: processZip
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def processZip( sDirectoryName, sZipfilename, datfilename, indexlist ):
    sBasename = sZipfilename.replace('.zip', '')
    thiszip = zipfile.ZipFile( sDirectoryName + '/' + sZipfilename )
    datafile = open( sDirectoryName + '/' + datfilename + '~', 'wb' )
    namelist = thiszip.namelist()
    for index in indexlist:
        print( 'index: ' + str(index) )
        # for name in thiszip.namelist():
        name = namelist[index + 1] # skip name of directory
        print( name )
        if name.endswith('.sgf'):
            contents = thiszip.read( name )
            #print( contents )
            walkthroughSgf( datafile, contents )
            # sys.exit(-1)
            #break
        else:
            print('not sgf: [' + name + ']' )
            sys.exit(-1)
    datafile.write('END')
    datafile.close()
    os.rename( sDirectoryName + '/' + datfilename + '~', sDirectoryName + '/' + datfilename )
Example 2: downloadFiles
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def downloadFiles( sTargetDirectory ):
    fileInfos = index_processor.get_fileInfos( sTargetDirectory )
    urlsToDo = []
    for fileinfo in fileInfos:
        url = fileinfo['url']
        print( url )
        sFilename = fileinfo['filename']
        if not os.path.isfile( sTargetDirectory + '/' + sFilename ):
            urlsToDo.append( ( url, sTargetDirectory + '/' + sFilename ) )
            print( 'to do: ' + url + ' ... ' )
    pool = multiprocessing.Pool( processes = 16 )
    try:
        it = pool.imap( worker, urlsToDo, )
        for i in it:
            # print( i )
            pass
        pool.close()
        pool.join()
    except KeyboardInterrupt:
        print( "Caught KeyboardInterrupt, terminating workers" )
        pool.terminate()
        pool.join()
        sys.exit(-1)
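The worker function handed to pool.imap above is defined elsewhere in the source project. A minimal sketch of what such a worker could look like is shown below; the download logic is an assumption for illustration, not the project's actual implementation.

# Hypothetical worker for the pool.imap call above: receives a (url, target path)
# tuple, downloads to a temporary file, then renames it into place so a partially
# downloaded file is never mistaken for a complete one on the next run.
import os
import urllib.request

def worker(url_and_path):
    url, target_path = url_and_path
    urllib.request.urlretrieve(url, target_path + '~')
    os.rename(target_path + '~', target_path)
    return target_path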
Example 3: main_mulprocessing
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def main_mulprocessing(min_id=None,
                       max_id=None,
                       window_size=5000,
                       number_of_processes=4,
                       drop_first=False):
    engine = create_engine(
        URL(**CONF['database']['connect_args']),
        pool_size=1,
        pool_recycle=CONF['database']['pool_recycle'],
        client_encoding='utf8')
    init_tables(engine, drop_first)
    try:
        manager = ParserManager(
            min_id=min_id,
            max_id=max_id,
            window_size=window_size,
            number_of_processes=number_of_processes)
        manager.start()
        manager.join()
    except (KeyboardInterrupt, SystemExit):
        logger.info('interrupt signal received')
        sys.exit(1)
    except Exception as e:
        raise e
Example 4: zipsToDats
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def zipsToDats( sTargetDirectory, samplesList, name ):
    iCount = 0
    zipNames = set()
    indexesByZipName = {}
    for sample in samplesList:
        (filename,index) = sample
        zipNames.add( filename )
        if filename not in indexesByZipName:
            indexesByZipName[filename] = []
        indexesByZipName[filename].append( index )
    print( 'num zips: ' + str( len( zipNames ) ) )
    zipsToDo = []
    for zipName in zipNames:
        sBasename = zipName.replace('.zip','')
        sDatFilename = sBasename + name + '.dat'
        if not os.path.isfile( sTargetDirectory + '/' + sDatFilename ):
            zipsToDo.append( ( sTargetDirectory, zipName, sDatFilename, indexesByZipName[zipName] ) )
    cores = multiprocessing.cpu_count()
    pool = multiprocessing.Pool( processes = cores )
    # pool.map( loadAllSgfs, dirsToDo )
    p = pool.map_async( worker, zipsToDo )
    try:
        results = p.get(0xFFFF)
        # pool.close()
        # pool.join()
    except KeyboardInterrupt:
        print( "Caught KeyboardInterrupt, terminating workers" )
        pool.terminate()
        pool.join()
        sys.exit(-1)
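A note on the p.get(0xFFFF) pattern, which also appears in Examples 6 and 8: passing a very large timeout to AsyncResult.get() instead of blocking without one is a common workaround from Python 2-era multiprocessing code, where an untimed wait could fail to deliver KeyboardInterrupt to the parent process. With the timeout in place, Ctrl-C reliably reaches the except KeyboardInterrupt branch, which terminates the pool and calls sys.exit(-1).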
Example 5: createSingleDat
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def createSingleDat( targetDirectory, name, samples ):
    print( 'creating consolidated .dat...' )
    filePath = targetDirectory + '/kgsgo-' + name + '.dat'
    if os.path.isfile( filePath ):
        print( 'consolidated file ' + filePath + ' already exists :-)' )
        return
    # first check if we have all files
    # first need to get the names of all files
    datfilesNeeded = set()
    for sample in samples:
        (filename, index ) = sample
        datfilesNeeded.add( filename )
    print( 'total dat files to be consolidated: ' + str( len( datfilesNeeded ) ) )
    datfilenames = []
    for zipfilename in datfilesNeeded:
        datfilename = zipfilename.replace('.zip','') + name + '.dat'
        datfilenames.append(datfilename)
    allfilespresent = True
    for datfilename in datfilenames:
        if not os.path.isfile( targetDirectory + '/' + datfilename ):
            allfilespresent = False
            print( 'Missing dat file: ' + datfilename )
            sys.exit(-1)
    consolidatedfile = open( filePath + '~', 'wb' )
    for filename in datfilenames:
        print( 'reading from ' + filename + ' ...' )
        filepath = targetDirectory + '/' + filename
        singledat = open( filepath, 'rb' )
        data = singledat.read()
        if data[-3:] != 'END':
            print( 'Invalid file, doesnt end with END: ' + filepath )
            sys.exit(-1)
        consolidatedfile.write( data[:-3] )
        singledat.close()
    consolidatedfile.write( 'END' )
    consolidatedfile.close()
    os.rename( filePath + '~', filePath )
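A hedged usage sketch follows: as the destructuring inside the function shows, samples is a list of (zipfilename, index) pairs, and each zip is expected to already have a matching per-zip .dat produced by zipsToDats. The directory and file names here are invented for illustration.

# Hypothetical call site for createSingleDat; assumes files such as
# data/kgsgo/KGS-2007-19-train.dat were already written by zipsToDats.
samples = [
    ('KGS-2007-19-.zip', 0),
    ('KGS-2007-19-.zip', 1),
    ('KGS-2008-19-.zip', 0),
]
createSingleDat('data/kgsgo', 'train', samples)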
Example 6: zipsToDats
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def zipsToDats( sTargetDirectory, samplesList, name ):
    iCount = 0
    zipNames = set()
    indexesByZipName = {}
    for sample in samplesList:
        (filename,index) = sample
        zipNames.add( filename )
        if filename not in indexesByZipName:
            indexesByZipName[filename] = []
        indexesByZipName[filename].append( index )
    print( 'num zips: ' + str( len( zipNames ) ) )
    zipsToDo = []
    for zipName in zipNames:
        sBasename = zipName.replace('.zip','')
        sDatFilename = sBasename + name + '-v2.dat'
        if not os.path.isfile( sTargetDirectory + '/' + sDatFilename ):
            zipsToDo.append( ( sTargetDirectory, zipName, sDatFilename, indexesByZipName[zipName] ) )
    cores = multiprocessing.cpu_count()
    pool = multiprocessing.Pool( processes = cores )
    # pool.map( loadAllSgfs, dirsToDo )
    p = pool.map_async( worker, zipsToDo )
    try:
        results = p.get(0xFFFF)
        # pool.close()
        # pool.join()
    except KeyboardInterrupt:
        print( "Caught KeyboardInterrupt, terminating workers" )
        pool.terminate()
        pool.join()
        sys.exit(-1)
Example 7: get_params
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def get_params():
    parser = get_params_parser()
    args = parser.parse_args()
    if not args.org or not args.token:
        parser.error("token and org params must be provided.")
        sys.exit(1)
    return args
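For context, a hedged sketch of what get_params_parser might return is given below; the real parser lives in the source project and likely defines more options. Note that argparse's parser.error() already prints the message and exits the process with status 2 on its own, so the explicit sys.exit(1) above acts only as a defensive fallback.

# Hypothetical parser construction for the get_params example above.
import argparse

def get_params_parser():
    parser = argparse.ArgumentParser(description='Collect data from a GitHub organization')
    parser.add_argument('-o', '--org', help='GitHub organization to query')
    parser.add_argument('-t', '--token', help='GitHub API token')
    return parser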
Example 8: map_to_workers
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def map_to_workers(self, name, samples):
    '''
    Determine the list of zip files that need to be processed, then map load
    to number of available CPUs.
    '''
    # Create a dictionary with file names as keys and games as values
    zip_names = set()
    indices_by_zip_name = {}
    for filename, index in samples:
        zip_names.add(filename)
        if filename not in indices_by_zip_name:
            indices_by_zip_name[filename] = []
        indices_by_zip_name[filename].append(index)
    print('>>> Number of zip files: ' + str(len(zip_names)))
    # Transform the above dictionary to a list that can be processed in parallel
    zips_to_process = []
    for zip_name in zip_names:
        base_name = zip_name.replace('.tar.gz', '')
        data_file_name = base_name + name
        if not os.path.isfile(self.data_dir + '/' + data_file_name):
            zips_to_process.append((self.__class__, self.data_dir, self.num_planes, zip_name,
                                    data_file_name, indices_by_zip_name[zip_name]))
    # Determine number of CPU cores and split work load among them
    cores = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(processes=cores)
    p = pool.map_async(worker, zips_to_process)
    try:
        results = p.get(0xFFFF)
        print(results)
    except KeyboardInterrupt:
        print("Caught KeyboardInterrupt, terminating workers")
        pool.terminate()
        pool.join()
        sys.exit(-1)
Example 9: _download_pretrained_model
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def _download_pretrained_model(prompt=True):
    """Downloads the pre-trained BIST model if non-existent."""
    model_info_exists = path.isfile(IntentExtractionApi.pretrained_model_info)
    model_exists = path.isfile(IntentExtractionApi.pretrained_model)
    if not model_exists or not model_info_exists:
        print(
            "The pre-trained models to be downloaded for the intent extraction dataset "
            "are licensed under Apache 2.0. By downloading, you accept the terms "
            "and conditions provided by the license"
        )
        makedirs(IntentExtractionApi.model_dir, exist_ok=True)
        if prompt is True:
            agreed = IntentExtractionApi._prompt()
            if agreed is False:
                sys.exit(0)
        download_unlicensed_file(
            "https://s3-us-west-2.amazonaws.com/nlp-architect-data" "/models/intent/",
            "model_info.dat",
            IntentExtractionApi.pretrained_model_info,
        )
        download_unlicensed_file(
            "https://s3-us-west-2.amazonaws.com/nlp-architect-data" "/models/intent/",
            "model.h5",
            IntentExtractionApi.pretrained_model,
        )
        print("Done.")
Example 10: _download_pretrained_model
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def _download_pretrained_model(self, prompt=True):
    """Downloads the pre-trained BIST model if non-existent."""
    model_exists = path.isfile(self.pretrained_model)
    model_info_exists = path.isfile(self.pretrained_model_info)
    if not model_exists or not model_info_exists:
        print(
            "The pre-trained models to be downloaded for the NER dataset "
            "are licensed under Apache 2.0. By downloading, you accept the terms "
            "and conditions provided by the license"
        )
        makedirs(self.model_dir, exist_ok=True)
        if prompt is True:
            agreed = NerApi._prompt()
            if agreed is False:
                sys.exit(0)
        download_unlicensed_file(
            "https://s3-us-west-2.amazonaws.com/nlp-architect-data" "/models/ner/",
            "model_v4.h5",
            self.pretrained_model,
        )
        download_unlicensed_file(
            "https://s3-us-west-2.amazonaws.com/nlp-architect-data" "/models/ner/",
            "model_info_v4.dat",
            self.pretrained_model_info,
        )
        print("Done.")
Example 11: walkthroughSgf
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def walkthroughSgf( datafile, sgfContents ):
    sgf = gomill.sgf.Sgf_game.from_string( sgfContents )
    # print sgf
    if sgf.get_size() != 19:
        print( 'boardsize not 19, ignoring' )
        return
    goBoard = GoBoard.GoBoard(19)
    doneFirstMove = False
    if sgf.get_handicap() != None and sgf.get_handicap() != 0:
        #print 'handicap not zero, ignoring (' + str( sgf.get_handicap() ) + ')'
        #handicappoints = gomill.handicap_layout.handicap_points( sgf.get_handicap(), 19 )
        numhandicap = sgf.get_handicap()
        #print sgf.get_root().get_setup_stones()
        #sys.exit(-1)
        #for move in getHandicapPoints( numhandicap ):
        for set in sgf.get_root().get_setup_stones():
            #print set
            for move in set:
                #print move
                goBoard.applyMove( 'b', move )
            #sys.exit(-1)
        # print( 'handicap: ' + str(numhandicap) )
        doneFirstMove = True
        #sys.exit(-1)
    moveIdx = 0
    for it in sgf.main_sequence_iter():
        (color,move) = it.get_move()
        # print( 'color ' + str(color) )
        # print( move )
        if color != None and move != None:
            (row,col) = move
            if doneFirstMove and datafile != None:
                addToDataFile( datafile, color, move, goBoard )
            #print 'applying move ' + str( moveIdx )
            goBoard.applyMove( color, (row,col) )
            #print goBoard
            moveIdx = moveIdx + 1
            doneFirstMove = True
            #if moveIdx >= 120:
            #    sys.exit(-1)
    #print( goBoard )
    #print 'winner: ' + sgf.get_winner()
Example 12: walkthroughSgf
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def walkthroughSgf( countOnly, datafile, sgfContents ):
    sgf = gomill.sgf.Sgf_game.from_string( sgfContents )
    # print sgf
    if sgf.get_size() != 19:
        print( 'boardsize not 19, ignoring' )
        return
    goBoard = GoBoard.GoBoard(19)
    doneFirstMove = False
    if sgf.get_handicap() != None and sgf.get_handicap() != 0:
        #print 'handicap not zero, ignoring (' + str( sgf.get_handicap() ) + ')'
        #handicappoints = gomill.handicap_layout.handicap_points( sgf.get_handicap(), 19 )
        numhandicap = sgf.get_handicap()
        #print sgf.get_root().get_setup_stones()
        #sys.exit(-1)
        #for move in getHandicapPoints( numhandicap ):
        for set in sgf.get_root().get_setup_stones():
            #print set
            for move in set:
                #print move
                goBoard.applyMove( 'b', move )
            #sys.exit(-1)
        # print( 'handicap: ' + str(numhandicap) )
        doneFirstMove = True
        #sys.exit(-1)
    # first, count number of moves...
    if countOnly:
        numMoves = 0
        countDoneFirstMove = doneFirstMove
        for it in sgf.main_sequence_iter():
            (color,move) = it.get_move()
            if color != None and move != None:
                #(row,col) = move
                if countDoneFirstMove:
                    numMoves = numMoves + 1
                    #addToDataFile( datafile, color, move, goBoard )
                countDoneFirstMove = True
        return numMoves
    #writeFileHeader( datafile, numMoves, 7, 19, 'int', 1 )
    moveIdx = 0
    for it in sgf.main_sequence_iter():
        (color,move) = it.get_move()
        if color != None and move != None:
            (row,col) = move
            if doneFirstMove:
                addToDataFile( datafile, color, move, goBoard )
            goBoard.applyMove( color, (row,col) )
            moveIdx = moveIdx + 1
            doneFirstMove = True

# unzip the zip, process the sgfs to .dat, and remove the unzipped directory
Example 13: createSingleDat
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def createSingleDat( targetDirectory, name, samples ):
    print( 'creating consolidated .dat...' )
    filePath = targetDirectory + '/kgsgo-' + name + '-v2.dat'
    if os.path.isfile( filePath ):
        print( 'consolidated file ' + filePath + ' already exists :-)' )
        return
    # first check if we have all files
    # first need to get the names of all files
    # also, need to count total number of records
    numRecords = 0
    datfilesNeeded = set()
    for sample in samples:
        (filename, index ) = sample
        datfilesNeeded.add( filename )
    print( 'total dat files to be consolidated: ' + str( len( datfilesNeeded ) ) )
    datfilenames = []
    for zipfilename in datfilesNeeded:
        datfilename = zipfilename.replace('.zip','') + name + '-v2.dat'
        datfilenames.append(datfilename)
    allfilespresent = True
    for datfilename in datfilenames:
        if not os.path.isfile( targetDirectory + '/' + datfilename ):
            allfilespresent = False
            print( 'Missing dat file: ' + datfilename )
            sys.exit(-1)
        childdatfile = open( targetDirectory + '/' + datfilename, 'rb' )
        header = childdatfile.read(1024)
        thisN = int( header.split('-n=')[1].split('-')[0] )
        childdatfile.close()
        numRecords = numRecords + thisN
        print( 'child ' + datfilename + ' N=' + str(thisN) )
    consolidatedfile = open( filePath + '~', 'wb' )
    writeFileHeader( consolidatedfile, numRecords, 7, 19, 'int', 1 )
    for filename in datfilenames:
        print( 'reading from ' + filename + ' ...' )
        filepath = targetDirectory + '/' + filename
        singledat = open( filepath, 'rb' )
        # first, skip header
        singledat.read(1024)
        data = singledat.read()
        if data[-3:] != 'END':
            print( 'Invalid file, doesnt end with END: ' + filepath )
            sys.exit(-1)
        consolidatedfile.write( data[:-3] )
        singledat.close()
    consolidatedfile.write( 'END' )
    consolidatedfile.close()
    os.rename( filePath + '~', filePath )
Example 14: consolidate_games
# Required module import: from os import sys [as alias]
# Or alternatively: from os.sys import exit [as alias]
def consolidate_games(self, name, samples):
    print('>>> Creating consolidated .dat...')
    file_path = self.data_dir + '/kgsgo_' + name
    if os.path.isfile(file_path):
        print('>>> File ' + file_path + ' already exists')
        return
    files_needed = set(file_name for file_name, index in samples)
    print('>>> Total dat files to be consolidated: ' + str(len(files_needed)))
    # Collect names of data files
    data_file_names = []
    for zip_file_name in files_needed:
        data_file_name = zip_file_name.replace('.tar.gz', '') + name
        data_file_names.append(data_file_name)
    # Count total number of moves
    num_records = 0
    for data_file_name in data_file_names:
        if not os.path.isfile(self.data_dir + '/' + data_file_name):
            print('>>> Missing file: ' + data_file_name)
            sys.exit(-1)
        child = open(self.data_dir + '/' + data_file_name, 'rb')
        header = child.read(1024)
        this_n = int(header.split('-n=')[1].split('-')[0])
        child.close()
        num_records = num_records + this_n
    # Write content to the consolidated file
    consolidated_file = open(file_path, 'wb')
    self.write_file_header(consolidated_file, num_records, self.num_planes, 19, 1)
    for filename in data_file_names:
        print('>>> Reading from ' + filename + ' ...')
        file_path = self.data_dir + '/' + filename
        single_dat = open(file_path, 'rb')
        single_dat.read(1024)
        data = single_dat.read()
        if data[-3:] != 'END':
            raise Exception('Invalid file, doesnt end with END: ' + file_path)
        consolidated_file.write(data[:-3])
        single_dat.close()
    consolidated_file.write('END')
    consolidated_file.close()
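Examples 13 and 14 both rely on a fixed-size 1024-byte file header that embeds the record count as a "-n=&lt;count&gt;-" token, which is exactly what header.split('-n=')[1].split('-')[0] parses back out. A hedged sketch of a writer for such a header follows; only the "-n=" token and the 1024-byte size are taken from the examples, the remaining field layout is an assumption for illustration.

# Hypothetical header writer matching what the consolidation examples parse:
# a human-readable prefix padded out to exactly 1024 bytes.
def write_file_header(outfile, num_records, num_planes, board_size, num_labels):
    header = '-n={}-planes={}-size={}-labels={}-'.format(
        num_records, num_planes, board_size, num_labels)
    outfile.write(header.ljust(1024).encode('ascii'))  # fixed 1024-byte header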