本文整理汇总了Python中utool.ensuredir函数的典型用法代码示例。如果您正苦于以下问题:Python ensuredir函数的具体用法?Python ensuredir怎么用?Python ensuredir使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了ensuredir函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_global_species_scorenorm_cachedir
def get_global_species_scorenorm_cachedir(ibs, species_text, ensure=True):
    """
    Return (and optionally create) the per-species score-normalizer cache dir.

    Args:
        species_text (str): species identifier used as the subdirectory name
        ensure (bool): when True, create the directories on disk

    Returns:
        str: species_cachedir

    CommandLine:
        python -m ibeis.control.IBEISControl --test-get_global_species_scorenorm_cachedir

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.IBEISControl import *  # NOQA
        >>> import ibeis  # NOQA
        >>> ibs = ibeis.opendb('testdb1')
        >>> species_text = ibeis.const.TEST_SPECIES.ZEB_GREVY
        >>> ensure = True
        >>> species_cachedir = ibs.get_global_species_scorenorm_cachedir(species_text, ensure)
        >>> resourcedir = ibs.get_ibeis_resource_dir()
        >>> result = ut.relpath_unix(species_cachedir, resourcedir)
        >>> print(result)
        scorenorm/zebra_grevys
    """
    scorenorm_cachedir = join(ibs.get_ibeis_resource_dir(),
                              const.PATH_NAMES.scorenormdir)
    species_cachedir = join(scorenorm_cachedir, species_text)
    if ensure:
        # NOTE(review): previously used ut.ensurepath for the parent and
        # ut.ensuredir for the child; unified on ensuredir for consistency
        # (ensurepath appears to be an alias in utool -- confirm).
        ut.ensuredir(scorenorm_cachedir)
        ut.ensuredir(species_cachedir)
    return species_cachedir
示例2: new_database
def new_database(back, new_dbdir=None):
    """ File -> New Database

    Prompt the user for a name and parent directory (unless new_dbdir is
    given), create the database directory, and open it.

    Args:
        back: GUI back-end providing user_input / user_option / open_database
        new_dbdir (str): explicit target directory; when None the user is prompted

    Raises:
        ValueError: if the chosen parent directory does not exist or the
            target database directory already exists.
    """
    if new_dbdir is None:
        new_dbname = back.user_input(
            msg='What do you want to name the new database?',
            title='New Database')
        if new_dbname is None or len(new_dbname) == 0:
            print('Abort new database. new_dbname=%r' % new_dbname)
            return
        reply = back.user_option(
            msg='Where should I put the new database?',
            title='Import Images',
            options=['Choose Directory', 'My Work Dir'],
            use_cache=False)
        if reply == 'Choose Directory':
            print('[back] new_database(): SELECT A DIRECTORY')
            putdir = guitool.select_directory('Select new database directory')
        elif reply == 'My Work Dir':
            putdir = back.get_work_directory()
        else:
            print('Abort new database')
            return
        # BUGFIX: the directory chooser can return None when the user
        # cancels, which previously crashed in join(); treat it as an abort.
        if putdir is None:
            print('Abort new database')
            return
        new_dbdir = join(putdir, new_dbname)
        if not exists(putdir):
            raise ValueError('Directory %r does not exist.' % putdir)
        if exists(new_dbdir):
            raise ValueError('New DB %r already exists.' % new_dbdir)
    utool.ensuredir(new_dbdir)
    print('[back] new_database(new_dbdir=%r)' % new_dbdir)
    back.open_database(dbdir=new_dbdir)
示例3: load_qcx2_res
def load_qcx2_res(ibs, qrid_list, nocache=False):
    """Performs / loads all queries, using a big on-disk result cache when possible.

    Args:
        ibs: controller with query() and directory configuration
        qrid_list: query rowids (list or array-like with .tolist())
        nocache (bool): when True, skip reading the big cache

    Returns:
        list: qrid2_qres -- dense list indexed by query rowid; entries for
            rowids not in qrid_list are None
    """
    qreq = mc3.quickly_ensure_qreq(ibs, qrids=qrid_list)
    # Build query big cache rowid (uniquely identifies this db + query set)
    query_rowid = qreq.get_rowid()
    hs_rowid = ibs.get_db_name()
    qcxs_rowid = utool.hashstr_arr(qrid_list, lbl='_qcxs')
    qres_rowid = hs_rowid + query_rowid + qcxs_rowid
    cache_dir = join(ibs.dirs.cache_dir, 'query_results_bigcache')
    print('[rr2] load_qcx2_res(): %r' % qres_rowid)
    io_kwargs = dict(dpath=cache_dir, fname='query_results', rowid=qres_rowid, ext='.cPkl')
    # Return cache if available
    if not params.args.nocache_query and (not nocache):
        qrid2_qres = io.smart_load(**io_kwargs)
        if qrid2_qres is not None:
            print('[rr2] * cache hit')
            return qrid2_qres
        print('[rr2] * cache miss')
    else:
        print('[rr2] * cache off')
    # Individually load / compute queries
    if isinstance(qrid_list, list):
        qcx_set = set(qrid_list)
    else:
        # presumably a numpy array of rowids -- TODO confirm
        qcx_set = set(qrid_list.tolist())
    qcx_max = max(qrid_list) + 1
    # dense result list: position qrid holds its result, or None if unrequested
    qrid2_qres = [ibs.query(qrid) if qrid in qcx_set else None for qrid in xrange(qcx_max)]
    # Save to the cache
    print('[rr2] Saving query_results to bigcache: %r' % qres_rowid)
    utool.ensuredir(cache_dir)
    io.smart_save(qrid2_qres, **io_kwargs)
    return qrid2_qres
示例4: try_download_baseline_ibeis_normalizer
def try_download_baseline_ibeis_normalizer(ibs, qreq_):
    """
    Tries to download a baseline score normalizer for some species.
    Creates an empty normalizer if it cannot.

    Args:
        ibs: controller (unused directly; kept for interface compatibility)
        qreq_: query request providing species, config string, and its own ibs

    Returns:
        ScoreNormalizer: a loaded baseline encoder, a freshly trained one
            (developer machines only), or an empty encoder as fallback
    """
    baseline_url_dict = {
        # TODO: Populate
    }
    species_text = '_'.join(qreq_.get_unique_species())  # HACK
    query_cfgstr = qreq_.qparams.query_cfgstr
    # compute the species cache dir once (previously recomputed inside the try)
    cachedir = qreq_.ibs.get_global_species_scorenorm_cachedir(species_text)
    key = species_text + query_cfgstr
    baseline_url = baseline_url_dict.get(key, None)
    encoder = None
    if baseline_url is not None:
        try:
            baseline_cachedir = join(cachedir, 'baseline')
            ut.ensuredir(baseline_cachedir)
            encoder = vt.ScoreNormalizer(cfgstr=query_cfgstr, prefix=species_text)
            encoder.load(baseline_cachedir)
        except Exception:
            # best-effort download/load; fall through to the fallback below
            encoder = None
    if encoder is None:
        if False and ut.is_developer(['hyrule']):
            # train new encoder. only do this on hyrule
            print('Baseline does not exist and cannot be downloaded. Training baseline')
            encoder = train_baseline_ibeis_normalizer(qreq_.ibs)
        else:
            # return empty score encoder
            encoder = vt.ScoreNormalizer(cfgstr=query_cfgstr, prefix=species_text)
            print('returning empty encoder')
    return encoder
示例5: set_logdir
def set_logdir(log_dir):
    """Redirect utool logging into *log_dir* and persist the choice.

    The directory is created if needed, logging is restarted so new log
    files land in the resolved directory, and the location is written to
    the ibeis cache for future sessions.
    """
    from os.path import realpath, expanduser

    resolved_dir = realpath(expanduser(log_dir))
    ut.ensuredir(resolved_dir, verbose=True)
    ut.stop_logging()
    _ibeis_cache_write(LOGDIR_CACHEID, resolved_dir)
    ut.start_logging(appname=__APPNAME__)
示例6: TEST_SQL_NAMES
def TEST_SQL_NAMES():
    """Smoke test: insert names (including an SQL-injection string) and select them back.

    Returns:
        dict: locals() for interactive inspection (local names are part of
            the de-facto interface, so they must stay stable)
    """
    # -------- INIT DATABASE ------------
    #
    # Create new temp database
    sqldb_fname = 'temp_test_sql_names.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    print('Remove Old Temp Database')
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    print('New Temp Database')
    db = SQLDatabaseControl.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                                  sqldb_fname=sqldb_fname)
    #
    # Define the schema
    __define_schema(db)
    #
    # -------- RUN INSERTS --------------
    print('[TEST] --- INSERT NAMES --- ')
    test_names = [
        'fred',
        'sue',
        # injection attempt: parameterized queries should store this verbatim
        'Robert\');DROP TABLE names;--',
        'joe',
        'rob',
    ]
    __insert_names(db, test_names)
    __insert_names(db, test_names[2:3])  # re-insert the injection name
    #
    # -------- RUN SELECT NAMES --------------
    print('[TEST] --- SELECT NAMES ---')
    name_text_results = db.executeone('SELECT name_text FROM names', [])
    print(' * name_text_results=%r' % name_text_results)
    #assert name_text_results == test_names, 'unexpected results from select names'
    return locals()
示例7: get_smart_patrol_dir
def get_smart_patrol_dir(ibs, ensure=True):
    """
    Return the smart-patrol directory inside the database directory.

    Args:
        ensure (bool): create the directory on disk when True

    Returns:
        str smart_patrol_dpath

    CommandLine:
        python -m ibeis.control.IBEISControl --test-get_smart_patrol_dir

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.IBEISControl import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('testdb1')
        >>> smart_patrol_dpath = ibs.get_smart_patrol_dir(True)
        >>> ut.assertpath(smart_patrol_dpath, verbose=True)
    """
    dpath = join(ibs.dbdir, const.PATH_NAMES.smartpatrol)
    if ensure:
        ut.ensuredir(dpath)
    return dpath
示例8: vizualize_vocabulary
def vizualize_vocabulary(ibs, invindex):
    """
    cleaned up version of dump_word_patches. Makes idf scatter plots and dumps
    the patches that contributed to each word.

    CommandLine:
        python -m ibeis.algo.hots.smk.smk_plots --test-vizualize_vocabulary
        python -m ibeis.algo.hots.smk.smk_plots --test-vizualize_vocabulary --vf

    Example:
        >>> from ibeis.algo.hots.smk.smk_plots import *  # NOQA
        >>> from ibeis.algo.hots.smk import smk_debug
        >>> from ibeis.algo.hots.smk import smk_repr
        >>> #tup = smk_debug.testdata_raw_internals0(db='GZ_ALL', nWords=64000)
        >>> #tup = smk_debug.testdata_raw_internals0(db='GZ_ALL', nWords=8000)
        >>> tup = smk_debug.testdata_raw_internals0(db='PZ_Master0', nWords=64000)
        >>> #tup = smk_debug.testdata_raw_internals0(db='PZ_Mothers', nWords=8000)
        >>> ibs, annots_df, daids, qaids, invindex, qreq_ = tup
        >>> smk_repr.compute_data_internals_(invindex, qreq_.qparams, delete_rawvecs=False)
        >>> vizualize_vocabulary(ibs, invindex)
    """
    invindex.idx2_wxs = np.array(invindex.idx2_wxs)
    print('[smk_plots] Vizualizing vocabulary')
    # DUMPING PART --- dumps patches to disk
    figdir = ibs.get_fig_dir()
    ut.ensuredir(figdir)
    if ut.get_argflag('--vf'):
        ut.view_directory(figdir)
    # Compute Word Statistics
    metrics = compute_word_metrics(invindex)
    wx2_nMembers, wx2_pdist_stats, wx2_wdist_stats = metrics
    #(wx2_pdist, wx2_wdist, wx2_nMembers, wx2_pdist_stats, wx2_wdist_stats) = metrics
    #wx2_prad = {wx: pdist_stats['max'] for wx, pdist_stats in six.iteritems(wx2_pdist_stats) if 'max' in pdist_stats}
    #wx2_wrad = {wx: wdist_stats['max'] for wx, wdist_stats in six.iteritems(wx2_wdist_stats) if 'max' in wdist_stats}
    # max patch-distance / word-distance per word, when that stat is available
    wx2_prad = {wx: stats['max'] for wx, stats in wx2_pdist_stats.items() if 'max' in stats}
    wx2_wrad = {wx: stats['max'] for wx, stats in wx2_wdist_stats.items() if 'max' in stats}
    #wx2_prad = get_metric(metrics, 'wx2_pdist_stats', 'max')
    #wx2_wrad = get_metric(metrics, 'wx2_wdist_stats', 'max')
    # Select interesting words under each metric, then union the samples
    wx_sample1 = select_by_metric(wx2_nMembers)
    wx_sample2 = select_by_metric(wx2_prad)
    wx_sample3 = select_by_metric(wx2_wrad)
    wx_sample = wx_sample1 + wx_sample2 + wx_sample3
    # count how many words were selected by more than one metric
    overlap123 = len(wx_sample) - len(set(wx_sample))
    print('overlap123 = %r' % overlap123)
    wx_sample = set(wx_sample)
    print('len(wx_sample) = %r' % len(wx_sample))
    #make_scatterplots(ibs, figdir, invindex, metrics)
    vocabdir = join(figdir, 'vocab_patches2')
    wx2_dpath = get_word_dpaths(vocabdir, wx_sample, metrics)
    make_wordfigures(ibs, metrics, invindex, figdir, wx_sample, wx2_dpath)
示例9: TEST_SQL_NAMES
def TEST_SQL_NAMES():
    """Smoke test: insert names, select them back, then look up rowids by name.

    Returns:
        dict: locals() for interactive inspection (local names are part of
            the de-facto interface, so they must stay stable)
    """
    # -------- INIT DATABASE ------------
    #
    # Create new temp database
    sqldb_fname = 'temp_test_sql_names.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    print('Remove Old Temp Database')
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    print('New Temp Database')
    db = SQLDatabaseControl.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                                  sqldb_fname=sqldb_fname)
    #
    # Define the schema
    __define_schema(db)
    #
    # -------- RUN INSERTS --------------
    print('[TEST] --- INSERT NAMES --- ')
    test_names = [
        'fred',
        'sue',
        # injection attempt: parameterized queries should store this verbatim
        'Robert\');DROP TABLE Students;--',
        'joe',
        'rob',
    ]
    __insert_names(db, test_names)
    __insert_names(db, test_names[2:4])  # re-insert a slice containing the injection name
    #
    # -------- RUN SELECT NAMES --------------
    print('[TEST] --- SELECT NAMES ---')
    name_text_results = db.executeone('SELECT name_text FROM names', [])
    print(' * name_text_results=%r' % name_text_results)
    #assert name_text_results == test_names, 'unexpected results from select names'
    #
    # -------- RUN SELECT NIDS --------------
    print('[TEST] --- SELECT NIDS ---')
    # every other test name plus one that is certainly absent
    query_names = test_names[::2] + ['missingno']
    nid_list = db.executemany(
        operation='''
        SELECT name_rowid
        FROM names
        WHERE name_text=?
        ''',
        params_iter=((name,) for name in query_names))
    # Get the parameter indexes that failed
    failx_list = [count for count, nid in enumerate(nid_list) if nid is None]
    # only 'missingno' (index 3) should fail to resolve
    assert failx_list == [3]
    failed_names = [query_names[failx] for failx in failx_list]  # NOQA
    utool.printvar2('failed_names')
    # We selected a name not in the table.
    # Its return index is an empty list
    print('[TEST] nid_list=%r' % nid_list)
    print('[TEST] query_names=%r' % query_names)
    print('[TEST] test_names=%r' % test_names)
    # SQL INTEGERS START AT 1 APPARENTLY
    #expected_names = [test_names[nid - 1] for nid in nid_list]
    #assert expected_names == query_names, 'unexpected results from select names'
    return locals()
示例10: get_annot_probchip_fpath_list
def get_annot_probchip_fpath_list(ibs, aid_list, config2_=None, species=None):
    """ Build probability chip file paths based on the current IBEIS configuration

    Args:
        ibs (IBEISController):
        aid_list (list): annotation rowids
        config2_: optional query-parameter override
        species (str): optional species override for the filename format

    Returns:
        list: probchip_fpath_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_probchip import *  # NOQA
        >>> from os.path import basename
        >>> ibs, aid_list = preproc_chip.testdata_ibeis()
        >>> config2_ = ibs.new_query_params(dict(fg_on=False))
        >>> probchip_fpath_list = get_annot_probchip_fpath_list(ibs, aid_list, config2_=config2_)
        >>> result = ut.relpath_unix(probchip_fpath_list[1], ibs.get_dbdir())
        >>> print(result)
        _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=5a1a53ba-fd44-b113-7f8c-fcf248d7047f_CHIP(sz450)_FEATWEIGHT(OFF).png
    """
    # Compute the cache directory once (was previously queried twice); the
    # attribute assignment is preserved because other code appears to read
    # ibs.probchipdir -- TODO confirm
    cachedir = ibs.get_probchip_dir()
    ibs.probchipdir = cachedir
    ut.ensuredir(cachedir)
    probchip_fname_fmt = get_probchip_fname_fmt(ibs, config2_=config2_, species=species)
    annot_visual_uuid_list = ibs.get_annot_visual_uuids(aid_list)
    probchip_fpath_list = [ut.unixjoin(cachedir, probchip_fname_fmt.format(avuuid=avuuid))
                           for avuuid in annot_visual_uuid_list]
    return probchip_fpath_list
示例11: train
def train(dark, voc_path, weight_path, **kwargs):
    """
    Train a new forest with the given positive chips and negative chips.

    Args:
        dark: darknet wrapper object providing net state and _train_setup
        voc_path (str): path to the VOC-style training data
        weight_path (str): directory where trained weight files are written

    Kwargs:
        chips_norm_width (int, optional): Chip normalization width for resizing;
            the chip is resized to have a width of chips_norm_width and
            whatever resulting height in order to best match the original
            aspect ratio; defaults to 128
            If both chips_norm_width and chips_norm_height are specified,
            the original aspect ratio of the chip is not respected
        chips_norm_height (int, optional): Chip normalization height for resizing;
            the chip is resized to have a height of chips_norm_height and
            whatever resulting width in order to best match the original
            aspect ratio; defaults to None
            If both chips_norm_width and chips_norm_height are specified,
            the original aspect ratio of the chip is not respected
        verbose (bool, optional): verbose flag; defaults to object's verbose or
            selectively enabled for this function

    Returns:
        tuple: (weight_filepath, config_filepath, class_filepath)
    """
    # Default values
    # NOTE: params is an *ordered* dict -- its values are later passed
    # positionally to the C library, so the insertion order matters.
    params = odict(
        [
            ("weight_filepath", None),  # This value always gets overwritten
            ("verbose", dark.verbose),
            ("quiet", dark.quiet),
        ]
    )
    # params.update(kwargs)
    # only keys already in params are honored; unknown kwargs are ignored
    ut.update_existing(params, kwargs)
    # Make the tree path absolute
    weight_path = abspath(weight_path)
    ut.ensuredir(weight_path)
    # Setup training files and folder structures
    results = dark._train_setup(voc_path, weight_path)
    manifest_filename, num_images, config_filepath, class_filepath = results
    # Run training algorithm
    params_list = [dark.net, manifest_filename, weight_path, num_images] + list(params.values())
    DARKNET_CLIB.train(*params_list)
    # the C call is expected to fill in params["weight_filepath"] -- TODO confirm
    weight_filepath = params["weight_filepath"]
    if not params["quiet"]:
        print("\n\n[pydarknet py] *************************************")
        print("[pydarknet py] Training Completed")
        print("[pydarknet py] Weight file saved to: %s" % (weight_filepath,))
    return weight_filepath, config_filepath, class_filepath
示例12: db_to_dbdir
def db_to_dbdir(db, allow_newdir=False, extra_workdirs=None, use_sync=False):
    """ Implicitly gets dbdir. Searches for db inside of workdir

    Args:
        db (str): database name (or alias) to locate
        allow_newdir (bool): create the directory when it is not found
        extra_workdirs (list): additional directories to search first
        use_sync (bool): also search the sibling ../sync directory

    Returns:
        str: dbdir -- the resolved database directory

    Raises:
        AssertionError: if the database cannot be found and was not created
    """
    # BUGFIX: avoid a mutable default argument (was extra_workdirs=[])
    if extra_workdirs is None:
        extra_workdirs = []
    work_dir = get_workdir()
    dbalias_dict = get_dbalias_dict()
    workdir_list = []
    for extra_dir in extra_workdirs:
        if exists(extra_dir):
            workdir_list.append(extra_dir)
    if use_sync:
        sync_dir = join(work_dir, '../sync')
        if exists(sync_dir):
            workdir_list.append(sync_dir)
    workdir_list.append(work_dir)  # TODO: Allow multiple workdirs
    # Check all of your work directories for the database
    for _dir in workdir_list:
        dbdir = realpath(join(_dir, db))
        # Use db aliases
        if not exists(dbdir) and db.upper() in dbalias_dict:
            dbdir = join(_dir, dbalias_dict[db.upper()])
        if exists(dbdir):
            break
    # Create the database if newdbs are allowed in the workdir
    if allow_newdir:
        utool.ensuredir(dbdir, verbose=True)
    # Complain if the implicit dbdir does not exist
    if not exists(dbdir):
        print('!!!')
        print('[sysres] WARNING: db=%r not found in work_dir=%r' %
              (db, work_dir))
        fname_list = os.listdir(work_dir)
        lower_list = [fname.lower() for fname in fname_list]
        # maybe the user just got the capitalization wrong
        index = util_list.listfind(lower_list, db.lower())
        if index is not None:
            print('[sysres] WARNING: db capitalization seems to be off')
            if not utool.STRICT:
                print('[sysres] attempting to fix it')
                db = fname_list[index]
                dbdir = join(work_dir, db)
                print('[sysres] dbdir=%r' % dbdir)
                print('[sysres] db=%r' % db)
        if not exists(dbdir):
            msg = '[sysres!] ERROR: Database does not exist'
            print('<!!!>')
            print(msg)
            print('[sysres!] Here is a list of valid dbs: ' +
                  utool.indentjoin(fname_list, '\n * '))
            print('[sysres!] dbdir=%r' % dbdir)
            print('[sysres!] db=%r' % db)
            print('[sysres!] work_dir=%r' % work_dir)
            print('</!!!>')
            raise AssertionError(msg)
        print('!!!')
    return dbdir
示例13: dump_tables_to_csv
def dump_tables_to_csv(db):
    """ Convenience: Dumps all csv database files to disk

    Writes one <tablename>.csv per table into a CSV_DUMP subdirectory
    of the database directory.
    """
    csv_dump_dir = join(db.dir_, 'CSV_DUMP')
    utool.ensuredir(csv_dump_dir)
    for tablename in six.iterkeys(db.table_columns):
        csv_text = db.get_table_csv(tablename)
        with open(join(csv_dump_dir, tablename + '.csv'), 'w') as out_file:
            out_file.write(csv_text)
示例14: ingest_standard_database
def ingest_standard_database(db, force_delete=False):
    """Ingest one of the standard raw datasets into a (possibly new) ibeis database.

    Args:
        db (str): standard database name
        force_delete (bool): delete any existing database at the target first
    """
    print('[ingest] Ingest Standard Database: db=%r' % (db,))
    ingestable = get_standard_ingestable(db)
    target_dbdir = ibeis.sysres.db_to_dbdir(ingestable.db, allow_newdir=True, use_sync=False)
    utool.ensuredir(target_dbdir, verbose=True)
    if force_delete:
        ibsfuncs.delete_ibeis_database(target_dbdir)
    controller = IBEISControl.IBEISController(target_dbdir)
    ingest_rawdata(controller, ingestable)
示例15: _make_empty_controller
def _make_empty_controller():
    """Create a fresh SQLDatabaseController backed by a temporary sqlite file."""
    print('make_empty_controller')
    fname = 'temp_test_sql_control.sqlite3'
    dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(dpath)
    # start from a clean slate: drop any leftover database file
    utool.remove_file(join(dpath, fname), dryrun=False)
    return SQLDatabaseControl.SQLDatabaseController(sqldb_dpath=dpath,
                                                    sqldb_fname=fname)