本文整理汇总了Python中base.populators.DatasetPopulator.fetch方法的典型用法代码示例。如果您正苦于以下问题:Python DatasetPopulator.fetch方法的具体用法?Python DatasetPopulator.fetch怎么用?Python DatasetPopulator.fetch使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 base.populators.DatasetPopulator 的用法示例。
在下文中一共展示了DatasetPopulator.fetch方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: BaseUploadContentConfigurationTestCase
# 需要导入模块: from base.populators import DatasetPopulator [as 别名]
# 或者: from base.populators.DatasetPopulator import fetch [as 别名]
class BaseUploadContentConfigurationTestCase(integration_util.IntegrationTestCase):
    """Shared fixture for upload/fetch configuration integration tests."""

    framework_tool_and_types = True

    def setUp(self):
        super(BaseUploadContentConfigurationTestCase, self).setUp()
        interactor = self.galaxy_interactor
        self.dataset_populator = DatasetPopulator(interactor)
        self.library_populator = LibraryPopulator(interactor)
        self.history_id = self.dataset_populator.new_history()

    def fetch_target(self, target, assert_ok=False, attach_test_file=False):
        """POST a single fetch-API target and return the raw response.

        :param target: fetch API target dictionary.
        :param assert_ok: when True, the populator asserts the fetch succeeded.
        :param attach_test_file: when True, attach 4.bed as the first upload file.
        """
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps([target]),
        }
        if attach_test_file:
            # NOTE(review): the handle is left open so the HTTP layer can
            # stream it; nothing here closes it explicitly.
            test_file = open(self.test_data_resolver.get_filename("4.bed"))
            payload["__files"] = {"files_0|file_data": test_file}
        return self.dataset_populator.fetch(payload, assert_ok=assert_ok)

    @classmethod
    def temp_config_dir(cls, name):
        # realpath here to get around problems with symlinks being blocked.
        return os.path.realpath(os.path.join(cls._test_driver.galaxy_test_tmp_dir, name))

    def _write_file(self, dir_path, content, filename="test"):
        """Helper for writing ftp/server dir files."""
        self._ensure_directory(dir_path)
        target_path = os.path.join(dir_path, filename)
        with open(target_path, "w") as fh:
            fh.write(content)
        return target_path

    def _ensure_directory(self, path):
        # makedirs raises if the directory already exists, so guard first.
        if not os.path.exists(path):
            os.makedirs(path)
示例2: LibrariesApiTestCase
# 需要导入模块: from base.populators import DatasetPopulator [as 别名]
# 或者: from base.populators.DatasetPopulator import fetch [as 别名]
class LibrariesApiTestCase(api.ApiTestCase, TestsDatasets):
def setUp(self):
    """Create the populator helpers used by the library API tests."""
    super(LibrariesApiTestCase, self).setUp()
    interactor = self.galaxy_interactor
    self.dataset_populator = DatasetPopulator(interactor)
    self.dataset_collection_populator = DatasetCollectionPopulator(interactor)
    self.library_populator = LibraryPopulator(interactor)
def test_create(self):
    """An admin POST to /libraries creates a library with the given name."""
    payload = dict(name="CreateTestLibrary")
    response = self._post("libraries", data=payload, admin=True)
    self._assert_status_code_is(response, 200)
    library = response.json()
    self._assert_has_keys(library, "name")
    assert library["name"] == "CreateTestLibrary"
def test_delete(self):
    """Admins can delete a library and then undelete it via the same endpoint."""
    library = self.library_populator.new_library("DeleteTestLibrary")
    delete_response = self._delete("libraries/%s" % library["id"], admin=True)
    self._assert_status_code_is(delete_response, 200)
    library = delete_response.json()
    self._assert_has_keys(library, "deleted")
    assert library["deleted"] is True
    # Test undeleting
    data = dict(undelete='true')
    undelete_response = self._delete("libraries/%s" % library["id"], data=data, admin=True)
    # Assert the status code BEFORE parsing JSON so an error response yields a
    # clear assertion failure instead of a JSON decode error -- this matches
    # the check-then-parse order used everywhere else in this class.
    self._assert_status_code_is(undelete_response, 200)
    library = undelete_response.json()
    assert library["deleted"] is False
def test_nonadmin(self):
    """Anonymous users receive 403 for create, delete, and update of libraries."""
    # Anons can't create libs
    create_response = self._post("libraries", data=dict(name="CreateTestLibrary"), admin=False, anon=True)
    self._assert_status_code_is(create_response, 403)
    # Anons can't delete libs
    library = self.library_populator.new_library("AnonDeleteTestLibrary")
    delete_response = self._delete("libraries/%s" % library["id"], admin=False, anon=True)
    self._assert_status_code_is(delete_response, 403)
    # Anons can't update libs
    update_payload = dict(name="ChangedName", description="ChangedDescription", synopsis='ChangedSynopsis')
    patch_response = self._patch("libraries/%s" % library["id"], data=update_payload, admin=False, anon=True)
    self._assert_status_code_is(patch_response, 403)
def test_update(self):
    """PATCHing a library as admin updates name, description and synopsis."""
    library = self.library_populator.new_library("UpdateTestLibrary")
    changes = dict(name='ChangedName', description='ChangedDescription', synopsis='ChangedSynopsis')
    patch_response = self._patch("libraries/%s" % library["id"], data=changes, admin=True)
    self._assert_status_code_is(patch_response, 200)
    library = patch_response.json()
    self._assert_has_keys(library, 'name', 'description', 'synopsis')
    # Every patched field should round-trip back unchanged.
    for field_name, expected in changes.items():
        assert library[field_name] == expected
def test_create_private_library_permissions(self):
    """A library restricted to the user's private role still allows that user to add folders."""
    library = self.library_populator.new_library("PermissionTestLibrary")
    private_role_id = self.library_populator.user_private_role_id()
    self.library_populator.set_permissions(library["id"], private_role_id)
    folder_response = self._create_folder(library)
    self._assert_status_code_is(folder_response, 200)
def test_create_dataset_denied(self):
    """A different user may not copy an HDA into someone else's private library folder."""
    library = self.library_populator.new_private_library("ForCreateDatasets")
    folder_response = self._create_folder(library)
    self._assert_status_code_is(folder_response, 200)
    folder_id = folder_response.json()[0]['id']
    history_id = self.dataset_populator.new_history()
    hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
    with self._different_user():
        denied_response = self._post("folders/%s/contents" % folder_id, {'from_hda_id': hda_id})
        self._assert_status_code_is(denied_response, 403)
def test_show_private_dataset_permissions(self):
    """Another user cannot view a dataset inside a private library."""
    library, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
    with self._different_user():
        show_response = self.library_populator.show_ldda(library["id"], library_dataset["id"])
        # TODO: this should really be 403 and a proper JSON exception.
        self._assert_status_code_is(show_response, 400)
def test_create_dataset(self):
    """A dataset created in a private library exposes peek/data_type and a txt extension."""
    _, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
    self._assert_has_keys(library_dataset, "peek", "data_type")
    assert "create_test" in library_dataset["peek"]
    assert library_dataset["file_ext"] == "txt", library_dataset["file_ext"]
def test_fetch_upload_to_folder(self):
history_id, library, destination = self._setup_fetch_to_folder("flat_zip")
items = [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]
targets = [{
"destination": destination,
"items": items
}]
payload = {
"history_id": history_id, # TODO: Shouldn't be needed :(
"targets": json.dumps(targets),
"__files": {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))},
}
#.........这里部分代码省略.........
示例3: DatasetCollectionApiTestCase
# 需要导入模块: from base.populators import DatasetPopulator [as 别名]
# 或者: from base.populators.DatasetPopulator import fetch [as 别名]
class DatasetCollectionApiTestCase(api.ApiTestCase):
def setUp(self):
    """Create populators and a fresh history for the collection API tests."""
    super(DatasetCollectionApiTestCase, self).setUp()
    interactor = self.galaxy_interactor
    self.dataset_populator = DatasetPopulator(interactor)
    self.dataset_collection_populator = DatasetCollectionPopulator(interactor)
    self.history_id = self.dataset_populator.new_history()
def test_create_pair_from_history(self):
    """Creating a pair from history datasets yields a collection with two elements."""
    payload = self.dataset_collection_populator.create_pair_payload(
        self.history_id,
        instance_type="history",
    )
    response = self._post("dataset_collections", payload)
    dataset_collection = self._check_create_response(response)
    elements = dataset_collection["elements"]
    assert len(elements) == 2, dataset_collection
def test_create_list_from_history(self):
    """A list built from three history datasets comes back with three elements."""
    identifiers = self.dataset_collection_populator.list_identifiers(self.history_id)
    payload = {
        "instance_type": "history",
        "history_id": self.history_id,
        "element_identifiers": json.dumps(identifiers),
        "collection_type": "list",
    }
    response = self._post("dataset_collections", payload)
    dataset_collection = self._check_create_response(response)
    elements = dataset_collection["elements"]
    assert len(elements) == 3, dataset_collection
def test_create_list_of_existing_pairs(self):
    """An existing pair HDCA can be wrapped as the single element of a new list."""
    # First build a plain pair collection in the history.
    pair_payload = self.dataset_collection_populator.create_pair_payload(
        self.history_id,
        instance_type="history",
    )
    pair_response = self._post("dataset_collections", pair_payload)
    pair_collection = self._check_create_response(pair_response)
    # Then reference that HDCA by id as the only element of a list.
    identifiers = [{"name": "test1", "src": "hdca", "id": pair_collection["id"]}]
    list_payload = {
        "instance_type": "history",
        "history_id": self.history_id,
        "element_identifiers": json.dumps(identifiers),
        "collection_type": "list",
    }
    list_response = self._post("dataset_collections", list_payload)
    dataset_collection = self._check_create_response(list_response)
    assert len(dataset_collection["elements"]) == 1, dataset_collection
def test_create_list_of_new_pairs(self):
    """Creating a nested list:paired collection populates its name, type and inner pair."""
    identifiers = self.dataset_collection_populator.nested_collection_identifiers(self.history_id, "list:paired")
    payload = {
        "collection_type": "list:paired",
        "instance_type": "history",
        "history_id": self.history_id,
        "name": "a nested collection",
        "element_identifiers": json.dumps(identifiers),
    }
    response = self._post("dataset_collections", payload)
    dataset_collection = self._check_create_response(response)
    assert dataset_collection["collection_type"] == "list:paired"
    assert dataset_collection["name"] == "a nested collection"
    outer_elements = dataset_collection["elements"]
    assert len(outer_elements) == 1, dataset_collection
    # The single outer element wraps the inner pair collection.
    outer_element = outer_elements[0]
    self._assert_has_keys(outer_element, "element_identifier", "element_index", "object")
    assert outer_element["element_identifier"] == "test_level_1", outer_element
    assert outer_element["element_index"] == 0, outer_element
    inner_pair = outer_element["object"]
    self._assert_has_keys(inner_pair, "collection_type", "elements", "element_count")
    self.assertEqual(inner_pair["collection_type"], "paired")
    self.assertEqual(inner_pair["populated"], True)
    inner_elements = inner_pair["elements"]
    assert len(inner_elements) == 2
    first_of_pair = inner_elements[0]
    assert first_of_pair["element_index"] == 0
def test_list_download(self):
    """Downloading a list collection yields a tar archive with one member per element."""
    fetch_response = self.dataset_collection_populator.create_list_in_history(self.history_id, direct_upload=True).json()
    dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response)
    returned_dce = dataset_collection["elements"]
    assert len(returned_dce) == 3, dataset_collection
    create_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=dataset_collection['id'])
    self._assert_status_code_is(create_response, 200)
    # Use the TarFile as a context manager so the archive handle is closed
    # deterministically instead of being leaked.
    with tarfile.open(fileobj=BytesIO(create_response.content)) as tar_contents:
        namelist = tar_contents.getnames()
    assert len(namelist) == 3, "Expected 3 elements in [%s]" % namelist
    collection_name = dataset_collection['name']
    # Each archive member is named <collection>/<element identifier>.<ext>.
    for element, zip_path in zip(returned_dce, namelist):
        assert "%s/%s.%s" % (collection_name, element['element_identifier'], element['object']['file_ext']) == zip_path
def test_pair_download(self):
#.........这里部分代码省略.........
示例4: ToolsUploadTestCase
# 需要导入模块: from base.populators import DatasetPopulator [as 别名]
# 或者: from base.populators.DatasetPopulator import fetch [as 别名]
class ToolsUploadTestCase(api.ApiTestCase):
def setUp(self):
    """Create the dataset populator used by every upload test."""
    super(ToolsUploadTestCase, self).setUp()
    interactor = self.galaxy_interactor
    self.dataset_populator = DatasetPopulator(interactor)
def test_upload1_paste(self):
    """Pasted content uploaded through the upload1 tool reports its outputs."""
    with self.dataset_populator.test_history() as history_id:
        payload = self.dataset_populator.upload_payload(history_id, 'Hello World')
        response = self._post("tools", data=payload)
        self._assert_has_keys(response.json(), 'outputs')
def test_upload1_paste_bad_datatype(self):
    """Uploading with an unknown datatype returns an error naming that datatype."""
    # Check that you get a nice message if you upload an incorrect datatype
    with self.dataset_populator.test_history() as history_id:
        bogus_type = "johnsawesomebutfakedatatype"
        payload = self.dataset_populator.upload_payload(history_id, 'Hello World', file_type=bogus_type)
        result = self._post("tools", data=payload).json()
        self._assert_has_keys(result, 'err_msg')
        assert bogus_type in result['err_msg']
# upload1 rewrites content with posix lines by default, but this can be disabled by setting
# to_posix_lines=None in the request. The newer fetch API does not do this by default, preferring
# to keep content unaltered if possible, but it can be enabled with a simple JSON boolean switch
# of the same name (to_posix_lines).
def test_upload_posix_newline_fixes_by_default(self):
    """upload1 converts Windows newlines to POSIX newlines by default."""
    windows_content = ONE_TO_SIX_ON_WINDOWS
    result_content = self._upload_and_get_content(windows_content)
    # assertEqual: assertEquals is a deprecated alias (and this file already
    # uses assertEqual elsewhere).
    self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)
def test_fetch_posix_unaltered(self):
    """The fetch API leaves Windows newlines untouched by default."""
    windows_content = ONE_TO_SIX_ON_WINDOWS
    result_content = self._upload_and_get_content(windows_content, api="fetch")
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, ONE_TO_SIX_ON_WINDOWS)
def test_upload_disable_posix_fix(self):
    """Setting to_posix_lines=None disables upload1's newline rewriting."""
    windows_content = ONE_TO_SIX_ON_WINDOWS
    result_content = self._upload_and_get_content(windows_content, to_posix_lines=None)
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, windows_content)
def test_fetch_post_lines_option(self):
    """The fetch API converts newlines when to_posix_lines=True is supplied."""
    windows_content = ONE_TO_SIX_ON_WINDOWS
    result_content = self._upload_and_get_content(windows_content, api="fetch", to_posix_lines=True)
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)
def test_upload_tab_to_space_off_by_default(self):
    """upload1 does not convert spaces to tabs unless asked."""
    table = ONE_TO_SIX_WITH_SPACES
    result_content = self._upload_and_get_content(table)
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, table)
def test_fetch_tab_to_space_off_by_default(self):
    """The fetch API does not convert spaces to tabs unless asked."""
    table = ONE_TO_SIX_WITH_SPACES
    result_content = self._upload_and_get_content(table, api='fetch')
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, table)
def test_upload_tab_to_space(self):
    """upload1 converts runs of spaces to tabs when space_to_tab is enabled."""
    table = ONE_TO_SIX_WITH_SPACES
    result_content = self._upload_and_get_content(table, space_to_tab="Yes")
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)
def test_fetch_tab_to_space(self):
    """The fetch API converts spaces to tabs when space_to_tab=True is supplied."""
    table = ONE_TO_SIX_WITH_SPACES
    result_content = self._upload_and_get_content(table, api="fetch", space_to_tab=True)
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)
def test_fetch_compressed_with_explicit_type(self):
    """Fetching a gzipped fastq with an explicit ext keeps the compressed datatype."""
    compressed_path = TestDataResolver().get_filename("1.fastqsanger.gz")
    with open(compressed_path, "rb") as fh:
        details = self._upload_and_get_details(fh, api="fetch", ext="fastqsanger.gz")
    assert details["state"] == "ok"
    assert details["file_ext"] == "fastqsanger.gz"
def test_fetch_compressed_default(self):
    """By default the fetch API keeps a gzipped fastq compressed."""
    compressed_path = TestDataResolver().get_filename("1.fastqsanger.gz")
    with open(compressed_path, "rb") as fh:
        details = self._upload_and_get_details(fh, api="fetch", assert_ok=False)
    assert details["state"] == "ok"
    assert details["file_ext"] == "fastqsanger.gz", details
@uses_test_history(require_new=True)
def test_fetch_compressed_auto_decompress_target(self, history_id):
    """Even with auto_decompress=True, this fetch path keeps the gz compressed."""
    # TODO: this should definitely be fixed to allow auto decompression via that API.
    compressed_path = TestDataResolver().get_filename("1.fastqsanger.gz")
    with open(compressed_path, "rb") as fh:
        details = self._upload_and_get_details(
            fh,
            api="fetch",
            history_id=history_id,
            assert_ok=False,
            auto_decompress=True,
        )
    assert details["state"] == "ok"
    assert details["file_ext"] == "fastqsanger.gz", details
def test_upload_decompress_off_with_auto_by_default(self):
    """upload1 with file_type=auto decompresses a gzipped bed to a plain bed."""
    # UNSTABLE_FLAG: This might default to a bed.gz datatype in the future.
    compressed_path = TestDataResolver().get_filename("4.bed.gz")
    with open(compressed_path, "rb") as fh:
        details = self._upload_and_get_details(fh, file_type="auto")
    assert details["state"] == "ok"
    assert details["file_ext"] == "bed", details
#.........这里部分代码省略.........