本文整理汇总了Python中qiita_db.util.get_mountpoint函数的典型用法代码示例。如果您正苦于以下问题:Python get_mountpoint函数的具体用法?Python get_mountpoint怎么用?Python get_mountpoint使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了get_mountpoint函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_get_process_target_gene_cmd
def test_get_process_target_gene_cmd(self):
    """Check the pick-OTUs command built for preprocessed data 1.

    The '-p' parameters-file argument points at a temporary file, so its
    value is skipped during the token-by-token comparison.
    """
    preprocessed_data = PreprocessedData(1)
    params = ProcessedSortmernaParams(1)
    obs_cmd, obs_output_dir = _get_process_target_gene_cmd(
        preprocessed_data, params)
    _, ref_dir = get_mountpoint('reference')[0]
    _, preprocessed_dir = get_mountpoint('preprocessed_data')[0]
    exp_cmd = ("pick_closed_reference_otus.py -i {}1_seqs.fna -r "
               "{}GreenGenes_13_8_97_otus.fasta -o {} -p placeholder -t "
               "{}GreenGenes_13_8_97_otu_taxonomy.txt".format(
                   preprocessed_dir, ref_dir, obs_output_dir, ref_dir))
    obs_parts = obs_cmd.split()
    exp_parts = exp_cmd.split()
    self.assertEqual(len(obs_parts), len(exp_parts))
    skip_next = False
    for observed, expected in zip(obs_parts, exp_parts):
        if skip_next:
            # this pair is the parameters-file value; do not compare it
            skip_next = False
            continue
        if observed == '-p':
            skip_next = True
        else:
            self.assertEqual(observed, expected)
示例2: test_prep_template_filepaths_get_req
def test_prep_template_filepaths_get_req(self):
    """The filepaths request for prep template 1 returns both template files."""
    obs = prep_template_filepaths_get_req(1, '[email protected]')
    exp_filepaths = []
    for fp_id, fname in ((19, '1_prep_1_qiime_19700101-000000.txt'),
                         (18, '1_prep_1_19700101-000000.txt')):
        exp_filepaths.append((fp_id,
                              join(get_mountpoint('templates')[0][1], fname)))
    exp = {'status': 'success',
           'message': '',
           'filepaths': exp_filepaths}
    self.assertEqual(obs, exp)
示例3: tearDown
def tearDown(self):
    """Truncate the test files and undo the study-2 biom rename."""
    # empty out the biom and mapping files written during the test
    for truncate_fp in (self.biom_fp, self.map_fp):
        with open(truncate_fp, 'w') as out:
            out.write("")
    leftover = join(get_mountpoint('analysis')[0][1], 'testfile.txt')
    if exists(leftover):
        remove(leftover)
    processed_mp = get_mountpoint("processed_data")[0][1]
    renamed = join(processed_mp,
                   "2_2_study_1001_closed_reference_otu_table.biom")
    # restore the original filename if a test renamed the study-2 table
    if exists(renamed):
        move(renamed,
             join(processed_mp,
                  "2_study_1001_closed_reference_otu_table.biom"))
示例4: get_filepaths
def get_filepaths(self, conn_handler=None):
    r"""Retrieve the list of (filepath_id, filepath) for this template.

    Parameters
    ----------
    conn_handler : SQLConnectionHandler, optional
        Existing connection handler to reuse; a new one is created when
        not provided.

    Returns
    -------
    list of (int, str)
        The (filepath_id, absolute filepath) pairs, newest first.

    Raises
    ------
    QiitaDBNotImplementedError
        If the calling subclass' table has no known filepath table.
    """
    # Check that this function has been called from a subclass
    self._check_subclass()
    # Check if the connection handler has been provided. Create a new
    # one if not.
    conn_handler = conn_handler if conn_handler else SQLConnectionHandler()
    if self._table == 'required_sample_info':
        table = 'sample_template_filepath'
        column = 'study_id'
    elif self._table == 'common_prep_info':
        table = 'prep_template_filepath'
        column = 'prep_template_id'
    else:
        raise QiitaDBNotImplementedError(
            'get_filepath for %s' % self._table)
    try:
        filepath_ids = conn_handler.execute_fetchall(
            "SELECT filepath_id, filepath FROM qiita.filepath WHERE "
            "filepath_id IN (SELECT filepath_id FROM qiita.{0} WHERE "
            "{1}=%s) ORDER BY filepath_id DESC".format(table, column),
            (self.id, ))
    except Exception as e:
        LogEntry.create('Runtime', str(e),
                        info={self.__class__.__name__: self.id})
        # re-raise with a bare `raise` so the original traceback is kept;
        # `raise e` would discard it under Python 2
        raise
    # stored filepaths are relative to the 'templates' mountpoint
    _, fb = get_mountpoint('templates', conn_handler)[0]
    base_fp = partial(join, fb)
    return [(fpid, base_fp(fp)) for fpid, fp in filepath_ids]
示例5: create_from_scratch
def create_from_scratch(self, prep_template, study_id):
    """Resolve the uploaded file names from the form and submit raw data.

    The form fields hold comma-separated file names only, so every file
    is looked up in every uploads mountpoint for the given study.
    """
    raw_data_filetype = self.get_argument("filetype")
    barcodes_str = self.get_argument("barcodes")
    forward_reads_str = self.get_argument("forward")
    sff_str = self.get_argument("sff")
    fasta_str = self.get_argument("fasta")
    qual_str = self.get_argument("qual")
    reverse_reads_str = self.get_argument("reverse")

    def _split(x):
        # an empty form field means "no files of this type"
        return x.split(",") if x else []

    filepaths = []
    fps = [(_split(barcodes_str), "raw_barcodes"),
           (_split(fasta_str), "raw_fasta"),
           (_split(qual_str), "raw_qual"),
           (_split(forward_reads_str), "raw_forward_seqs"),
           (_split(reverse_reads_str), "raw_reverse_seqs"),
           (_split(sff_str), "raw_sff")]
    # We need to retrieve the full path for all the files, as the
    # arguments only contain the file name. Since we don't know in which
    # mountpoint the data lives, we retrieve all of them and we loop
    # through all the files checking if they exist or not.
    for _, mount_dir in get_mountpoint("uploads", retrieve_all=True):
        study_dir = join(mount_dir, str(study_id))
        for names, filetype in fps:
            for name in names:
                candidate = join(study_dir, name)
                if exists(candidate):
                    filepaths.append((candidate, filetype))
    return submit(self.current_user.id, create_raw_data, raw_data_filetype,
                  prep_template, filepaths)
示例6: test_post_valid
def test_post_valid(self):
    """POSTing an artifact against a fresh preparation returns its id."""
    _, uploads_dir = get_mountpoint('uploads')[0]
    # stage two tiny fastq files in the study-1 uploads folder
    for name in ('foo.txt', 'bar.txt'):
        with open(os.path.join(uploads_dir, '1', name), 'w') as fh:
            fh.write("@x\nATGC\n+\nHHHH\n")
    template_df = load_template_to_dataframe(
        StringIO(EXP_PREP_TEMPLATE.format(1)))
    response = self.post('/api/v1/study/1/preparation?data_type=16S',
                         data=template_df.T.to_dict(),
                         headers=self.headers, asjson=True)
    prepid = json_decode(response.body)['id']
    uri = '/api/v1/study/1/preparation/%d/artifact' % prepid
    # filepath types: 1 -> fwd or rev sequences in fastq, 3 -> barcodes
    body = {'artifact_type': 'FASTQ',
            'artifact_name': 'a name is a name',
            'filepaths': [['foo.txt', 1],
                          ['bar.txt', 'raw_barcodes']]}
    response = self.post(uri, data=body, headers=self.headers, asjson=True)
    self.assertEqual(response.code, 201)
    obs = json_decode(response.body)['id']
    exp = PrepTemplate(prepid).artifact.id
    self.assertEqual(obs, exp)
示例7: setUp
def setUp(self):
    """Create placeholder fastq files, fixture state, and a second study."""
    fd, self.seqs_fp = mkstemp(suffix='_seqs.fastq')
    close(fd)
    fd, self.barcodes_fp = mkstemp(suffix='_barcodes.fastq')
    close(fd)
    self.filetype = 2
    self.filepaths = [(self.seqs_fp, 1), (self.barcodes_fp, 2)]
    self.studies = [Study(1)]
    _, self.db_test_raw_dir = get_mountpoint('raw_data')[0]
    # the temp files only need to exist; a single newline is enough
    for placeholder_fp in (self.seqs_fp, self.barcodes_fp):
        with open(placeholder_fp, "w") as fh:
            fh.write("\n")
    self._clean_up_files = []
    # Create a new study
    info = {
        "timeseries_type_id": 1,
        "metadata_complete": True,
        "mixs_compliant": True,
        "number_samples_collected": 25,
        "number_samples_promised": 28,
        "portal_type_id": 3,
        "study_alias": "FCM",
        "study_description": "Microbiome of people who eat nothing but "
                             "fried chicken",
        "study_abstract": "Exploring how a high fat diet changes the "
                          "gut microbiome",
        "emp_person_id": StudyPerson(2),
        "principal_investigator_id": StudyPerson(3),
        "lab_person_id": StudyPerson(1)
    }
    Study.create(User("[email protected]"), "Test study 2", [1], info)
示例8: _common_purge_filpeaths_test
def _common_purge_filpeaths_test(self):
    """Shared body for the purge_filepaths tests.

    Inserts two orphan rows into qiita.filepath (nothing references
    them), creates the matching files on disk, runs purge_filepaths()
    and checks that exactly those two rows and files are removed while
    every pre-existing filepath survives.
    """
    # Get all the filepaths so we can test if they've been removed or not
    sql_fp = "SELECT filepath, data_directory_id FROM qiita.filepath"
    fps = [join(get_mountpoint_path_by_id(dd_id), fp) for fp, dd_id in self.conn_handler.execute_fetchall(sql_fp)]
    # Make sure that the files exist - specially for travis
    for fp in fps:
        if not exists(fp):
            with open(fp, "w") as f:
                f.write("\n")
            self.files_to_remove.append(fp)
    _, raw_data_mp = get_mountpoint("raw_data")[0]
    # these two files will be the purge victims
    removed_fps = [join(raw_data_mp, "2_sequences_barcodes.fastq.gz"), join(raw_data_mp, "2_sequences.fastq.gz")]
    for fp in removed_fps:
        with open(fp, "w") as f:
            f.write("\n")
    # register the two files in qiita.filepath; data_directory_id 5 is
    # the raw_data mountpoint queried above — TODO confirm against the
    # test database fixture
    sql = """INSERT INTO qiita.filepath
                (filepath, filepath_type_id, checksum,
                 checksum_algorithm_id, data_directory_id)
            VALUES ('2_sequences_barcodes.fastq.gz', 3, '852952723', 1, 5),
                ('2_sequences.fastq.gz', 1, '852952723', 1, 5)
            RETURNING filepath_id"""
    fp_ids = self.conn_handler.execute_fetchall(sql)
    # fps now holds only the paths expected to survive the purge
    fps = set(fps).difference(removed_fps)
    # Check that the files exist
    for fp in fps:
        self.assertTrue(exists(fp))
    for fp in removed_fps:
        self.assertTrue(exists(fp))
    exp_count = get_count("qiita.filepath") - 2
    purge_filepaths()
    obs_count = get_count("qiita.filepath")
    # Check that only 2 rows have been removed
    self.assertEqual(obs_count, exp_count)
    # Check that the 2 rows that have been removed are the correct ones
    sql = """SELECT EXISTS(
        SELECT * FROM qiita.filepath WHERE filepath_id = %s)"""
    obs = self.conn_handler.execute_fetchone(sql, (fp_ids[0][0],))[0]
    self.assertFalse(obs)
    obs = self.conn_handler.execute_fetchone(sql, (fp_ids[1][0],))[0]
    self.assertFalse(obs)
    # Check that the files have been successfully removed
    for fp in removed_fps:
        self.assertFalse(exists(fp))
    # Check that all the other files still exist
    for fp in fps:
        self.assertTrue(exists(fp))
示例9: test_move_upload_files_to_trash
def test_move_upload_files_to_trash(self):
    """Moving an upload to trash hides it; invalid moves raise QiitaDBError."""
    test_filename = "this_is_a_test_file.txt"
    # create file to move to trash
    fid, folder = get_mountpoint("uploads")[0]
    test_fp = join(folder, "1", test_filename)
    with open(test_fp, "w") as f:
        f.write("test")
    self.files_to_remove.append(test_fp)
    self.assertItemsEqual(get_files_from_uploads_folders("1"),
                          [(fid, "this_is_a_test_file.txt"),
                           (fid, "uploaded_file.txt")])
    # move file
    move_upload_files_to_trash(1, [(fid, test_filename)])
    self.assertItemsEqual(get_files_from_uploads_folders("1"),
                          [(fid, "uploaded_file.txt")])
    # testing errors: wrong study, unknown folder id, repeated move
    for bad_study, bad_entries in ((2, [(fid, test_filename)]),
                                   (1, [(10, test_filename)]),
                                   (1, [(fid, test_filename)])):
        with self.assertRaises(QiitaDBError):
            move_upload_files_to_trash(bad_study, bad_entries)
    # removing trash folder
    rmtree(join(folder, "1", "trash"))
示例10: check_fp
def check_fp(study_id, filename):
    """Check whether an uploaded file exists

    Parameters
    ----------
    study_id : int
        Study file uploaded to
    filename : str
        name of the uploaded file

    Returns
    -------
    dict
        {'status': status, 'message': msg, 'file': str}
        file contains full filepath if status is success, otherwise it
        contains the filename
    """
    # Get the uploads folder
    _, base_fp = get_mountpoint("uploads")[0]
    # Get the path of the sample template in the uploads folder
    fp_rsp = join(base_fp, str(study_id), filename)
    if exists(fp_rsp):
        return {'status': 'success',
                'message': '',
                'file': fp_rsp}
    # The file does not exist, fail nicely
    return {'status': 'error',
            'message': 'file does not exist',
            'file': filename}
示例11: setUp
def setUp(self):
    """Write placeholder fastq files and create two prep templates."""
    fd, self.seqs_fp = mkstemp(suffix='_seqs.fastq')
    close(fd)
    fd, self.barcodes_fp = mkstemp(suffix='_barcodes.fastq')
    close(fd)
    self.filetype = 2
    self.filepaths = [(self.seqs_fp, 1), (self.barcodes_fp, 2)]
    _, self.db_test_raw_dir = get_mountpoint('raw_data')[0]
    # the temp files only need to exist; a single newline is enough
    for placeholder_fp in (self.seqs_fp, self.barcodes_fp):
        with open(placeholder_fp, "w") as fh:
            fh.write("\n")
    self._clean_up_files = []
    # Create some new PrepTemplates
    metadata = pd.DataFrame.from_dict(
        {'SKB8.640193': {'center_name': 'ANL',
                         'primer': 'GTGCCAGCMGCCGCGGTAA',
                         'barcode': 'GTCCGCAAGTTA',
                         'run_prefix': "s_G1_L001_sequences",
                         'platform': 'ILLUMINA',
                         'library_construction_protocol': 'AAAA',
                         'experiment_design_description': 'BBBB'}},
        orient='index')
    self.pt1 = PrepTemplate.create(metadata, Study(1), "16S")
    self.pt2 = PrepTemplate.create(metadata, Study(1), "18S")
    self.prep_templates = [self.pt1, self.pt2]
示例12: post
def post(self, study_id, prep_id):
    """Create an artifact from uploaded files and attach it to a prep.

    Responds 404 for an unknown prep, 409 when the prep belongs to a
    different study, 406 when artifact creation fails, 201 on success.
    """
    study = self.safe_get_study(study_id)
    if study is None:
        return
    prep_id = to_int(prep_id)
    try:
        prep = PrepTemplate(prep_id)
    except QiitaDBUnknownIDError:
        self.fail('Preparation not found', 404)
        return
    if prep.study_id != study.id:
        self.fail('Preparation ID not associated with the study', 409)
        return
    artifact_deets = json_decode(self.request.body)
    # the request body carries bare file names relative to the study's
    # uploads folder; expand them to full paths
    _, upload = get_mountpoint('uploads')[0]
    base = os.path.join(upload, study_id)
    filepaths = []
    for fp, fp_type in artifact_deets['filepaths']:
        filepaths.append((os.path.join(base, fp), fp_type))
    try:
        art = Artifact.create(filepaths,
                              artifact_deets['artifact_type'],
                              artifact_deets['artifact_name'],
                              prep)
    except QiitaError as e:
        self.fail(str(e), 406)
        return
    self.write({'id': art.id})
    self.set_status(201)
    self.finish()
示例13: test_move_upload_files_to_trash
def test_move_upload_files_to_trash(self):
    """Moving an upload to trash hides it; invalid moves raise QiitaDBError."""
    test_filename = 'this_is_a_test_file.txt'
    # create file to move to trash; use a context manager so the handle
    # is closed before the move (the bare open(...).write(...) form
    # leaked the file handle)
    fid, folder = get_mountpoint("uploads")[0]
    with open(join(folder, '1', test_filename), 'w') as f:
        f.write('test')
    exp = [(fid, 'this_is_a_test_file.txt'), (fid, 'uploaded_file.txt')]
    obs = get_files_from_uploads_folders("1")
    self.assertItemsEqual(obs, exp)
    # move file
    move_upload_files_to_trash(1, [(fid, test_filename)])
    exp = [(fid, 'uploaded_file.txt')]
    obs = get_files_from_uploads_folders("1")
    self.assertItemsEqual(obs, exp)
    # testing errors
    with self.assertRaises(QiitaDBError):
        # wrong study id
        move_upload_files_to_trash(2, [(fid, test_filename)])
    with self.assertRaises(QiitaDBError):
        # unknown folder id
        move_upload_files_to_trash(1, [(10, test_filename)])
    with self.assertRaises(QiitaDBError):
        # file already trashed
        move_upload_files_to_trash(1, [(fid, test_filename)])
    # removing trash folder
    rmtree(join(folder, '1', 'trash'))
示例14: test_build_mapping_file
def test_build_mapping_file(self):
    """_build_mapping_file writes a new mapping file and registers it."""
    # the freshly inserted filepath row should get the next free id
    new_id = get_count('qiita.filepath') + 1
    samples = {1: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']}
    self.analysis._build_mapping_file(samples)
    obs = self.analysis.mapping_file
    self.assertEqual(obs, self.map_fp)
    base_dir = get_mountpoint('analysis')[0][1]
    # comment='\t' effectively disables comment handling for these
    # tab-separated files
    obs = pd.read_csv(obs, sep='\t', infer_datetime_format=True,
                      parse_dates=True, index_col=False, comment='\t')
    exp = pd.read_csv(join(base_dir, '1_analysis_mapping_exp.txt'),
                      sep='\t', infer_datetime_format=True,
                      parse_dates=True, index_col=False, comment='\t')
    assert_frame_equal(obs, exp)
    sql = """SELECT * FROM qiita.filepath
             WHERE filepath=%s ORDER BY filepath_id"""
    obs = self.conn_handler.execute_fetchall(
        sql, ("%d_analysis_mapping.txt" % self.analysis.id,))
    exp = [[13, '1_analysis_mapping.txt', 9, '852952723', 1, 1],
           [new_id, '1_analysis_mapping.txt', 9, '1606265094', 1, 1]]
    self.assertEqual(obs, exp)
    sql = """SELECT * FROM qiita.analysis_filepath
             WHERE analysis_id=%s ORDER BY filepath_id"""
    obs = self.conn_handler.execute_fetchall(sql, (self.analysis.id,))
    # NOTE(review): the excerpt ends without asserting on this `exp`;
    # the final assertEqual appears to have been cut off in the listing
    exp = [[1L, 14L, 2L], [1L, 15L, None], [1L, new_id, None]]
示例15: get_filepaths
def get_filepaths(self):
    r"""Retrieve the list of (filepath_id, filepath) for this object.

    Returns
    -------
    list of (int, str)
        The (filepath_id, absolute filepath) pairs, newest first.
    """
    # Check that this function has been called from a subclass
    self._check_subclass()
    conn_handler = SQLConnectionHandler()
    try:
        filepath_ids = conn_handler.execute_fetchall(
            "SELECT filepath_id, filepath FROM qiita.filepath WHERE "
            "filepath_id IN (SELECT filepath_id FROM qiita.{0} WHERE "
            "{1}=%s) ORDER BY filepath_id DESC".format(
                self._filepath_table, self._id_column),
            (self.id, ))
    except Exception as e:
        LogEntry.create('Runtime', str(e),
                        info={self.__class__.__name__: self.id})
        # re-raise with a bare `raise` so the original traceback is kept;
        # `raise e` would discard it under Python 2
        raise
    # stored filepaths are relative to the 'templates' mountpoint
    _, fb = get_mountpoint('templates')[0]
    base_fp = partial(join, fb)
    return [(fpid, base_fp(fp)) for fpid, fp in filepath_ids]