This article collects typical usage examples of the Python function tool_shed.util.hg_util.get_repo_for_repository. If you are wondering what get_repo_for_repository does, how to call it, or what real-world uses look like, the curated code examples below should help.
The following presents 15 code examples of the get_repo_for_repository function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
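Before the examples, here is a minimal sketch of the two call patterns that recur throughout this page: opening the mercurial repo behind an existing Repository record, or opening (and optionally creating) a repo from an explicit path with repository=None. The app and repository objects, as well as the example path, are placeholders for illustration only; in the examples below they come from a running Tool Shed instance.

from tool_shed.util import hg_util

# `app` is assumed to be a Tool Shed application object and `repository` a
# Repository model instance; both are hypothetical placeholders here.

# Pattern 1: open the mercurial repo that backs an existing repository record.
repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )

# Pattern 2: open (or create, when create=True) a repo from an explicit disk path.
repo = hg_util.get_repo_for_repository( app, repository=None, repo_path='/path/to/repo_1', create=True )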
Example 1: upload_tar
def upload_tar( trans, rdah, tdah, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar,
                commit_message, new_repo_alert ):
    # Upload a tar archive of files.
    repo_dir = repository.repo_path( trans.app )
    hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
    undesirable_dirs_removed = 0
    undesirable_files_removed = 0
    check_results = commit_util.check_archive( repository, tar )
    if check_results.invalid:
        tar.close()
        uploaded_file.close()
        message = '%s Invalid paths were: %s' % (
            ' '.join( check_results.errors ), ', '.join( check_results.invalid ) )
        return False, message, [], '', undesirable_dirs_removed, undesirable_files_removed
    else:
        if upload_point is not None:
            full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
        else:
            full_path = os.path.abspath( repo_dir )
        undesirable_files_removed = len( check_results.undesirable_files )
        undesirable_dirs_removed = len( check_results.undesirable_dirs )
        filenames_in_archive = [ ti.name for ti in check_results.valid ]
        # Extract the uploaded tar to the load_point within the repository hierarchy.
        tar.extractall( path=full_path, members=check_results.valid )
        tar.close()
        uploaded_file.close()
        for filename in filenames_in_archive:
            uploaded_file_name = os.path.join( full_path, filename )
            if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                # are missing and if so, set them appropriately.
                altered, root_elem, error_message = rdah.handle_tag_attributes( uploaded_file_name )
                if error_message:
                    return False, error_message, [], '', [], []
                elif altered:
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
            elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if toolshed or changeset_revision
                # attributes are missing and if so, set them appropriately.
                altered, root_elem, error_message = tdah.handle_tag_attributes( uploaded_file_name )
                if error_message:
                    return False, error_message, [], '', [], []
                if altered:
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
        return commit_util.handle_directory_changes( trans.app,
                                                     trans.request.host,
                                                     trans.user.username,
                                                     repository,
                                                     full_path,
                                                     filenames_in_archive,
                                                     remove_repo_files_not_in_tar,
                                                     new_repo_alert,
                                                     commit_message,
                                                     undesirable_dirs_removed,
                                                     undesirable_files_removed )
Example 2: create_repository
def create_repository( app, name, type, description, long_description, user_id, category_ids=[], remote_repository_url=None, homepage_url=None ):
    """Create a new ToolShed repository"""
    sa_session = app.model.context.current
    # Add the repository record to the database.
    repository = app.model.Repository( name=name,
                                       type=type,
                                       remote_repository_url=remote_repository_url,
                                       homepage_url=homepage_url,
                                       description=description,
                                       long_description=long_description,
                                       user_id=user_id )
    # Flush to get the id.
    sa_session.add( repository )
    sa_session.flush()
    # Create an admin role for the repository.
    create_repository_admin_role( app, repository )
    # Determine the repository's repo_path on disk.
    dir = os.path.join( app.config.file_path, *directory_hash_id( repository.id ) )
    # Create directory if it does not exist.
    if not os.path.exists( dir ):
        os.makedirs( dir )
    # Define repo name inside hashed directory.
    repository_path = os.path.join( dir, "repo_%d" % repository.id )
    # Create local repository directory.
    if not os.path.exists( repository_path ):
        os.makedirs( repository_path )
    # Create the local repository.
    hg_util.get_repo_for_repository( app, repository=None, repo_path=repository_path, create=True )
    # Add an entry in the hgweb.config file for the local repository.
    lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
    app.hgweb_config_manager.add_entry( lhs, repository_path )
    # Create a .hg/hgrc file for the local repository.
    hg_util.create_hgrc_file( app, repository )
    flush_needed = False
    if category_ids:
        # Create category associations.
        for category_id in category_ids:
            category = sa_session.query( app.model.Category ) \
                                 .get( app.security.decode_id( category_id ) )
            rca = app.model.RepositoryCategoryAssociation( repository, category )
            sa_session.add( rca )
            flush_needed = True
    if flush_needed:
        sa_session.flush()
    # Update the repository registry.
    app.repository_registry.add_entry( repository )
    message = "Repository <b>%s</b> has been created." % escape( str( repository.name ) )
    return repository, message
Example 3: get_version_lineage_for_tool
def get_version_lineage_for_tool( self, repository_id, repository_metadata, guid ):
    """
    Return the tool version lineage chain in descendant order for the received
    guid contained in the received repository_metadata.tool_versions. This function
    is called only from the Tool Shed.
    """
    repository = suc.get_repository_by_id( self.app, repository_id )
    repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
    # Initialize the tool lineage.
    version_lineage = [ guid ]
    # Get all ancestor guids of the received guid.
    current_child_guid = guid
    for changeset in hg_util.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
        ctx = repo.changectx( changeset )
        rm = suc.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
        if rm:
            parent_guid = rm.tool_versions.get( current_child_guid, None )
            if parent_guid:
                version_lineage.append( parent_guid )
                current_child_guid = parent_guid
    # Get all descendant guids of the received guid.
    current_parent_guid = guid
    for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo,
                                                                     repository_metadata.changeset_revision,
                                                                     repository.tip( self.app ) ):
        ctx = repo.changectx( changeset )
        rm = suc.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
        if rm:
            tool_versions = rm.tool_versions
            for child_guid, parent_guid in tool_versions.items():
                if parent_guid == current_parent_guid:
                    version_lineage.insert( 0, child_guid )
                    current_parent_guid = child_guid
                    break
    return version_lineage
Example 4: get_latest_downloadable_repository_metadata
def get_latest_downloadable_repository_metadata( trans, repository ):
    """
    Return the latest downloadable repository_metadata record for the received repository. This will
    return repositories of type unrestricted as well as types repository_suite_definition and
    tool_dependency_definition.
    """
    encoded_repository_id = trans.security.encode_id( repository.id )
    repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
    tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
    repository_metadata = None
    try:
        repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id, tip_ctx )
        if repository_metadata is not None and repository_metadata.downloadable:
            return repository_metadata
        return None
    except:
        latest_downloadable_revision = metadata_util.get_previous_metadata_changeset_revision( repository,
                                                                                               repo,
                                                                                               tip_ctx,
                                                                                               downloadable=True )
        if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH:
            return None
        repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app,
                                                                                 encoded_repository_id,
                                                                                 latest_downloadable_revision )
        if repository_metadata is not None and repository_metadata.downloadable:
            return repository_metadata
        return None
Example 5: get_ordered_installable_revisions
def get_ordered_installable_revisions( self, trans, name, owner, **kwd ):
    """
    GET /api/repositories/get_ordered_installable_revisions

    :param name: the name of the Repository
    :param owner: the owner of the Repository

    Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
    As in the changelog, the list is ordered oldest to newest.
    """
    # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_column&owner=test
    if name and owner:
        # Get the repository information.
        repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
        if repository is None:
            error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
            error_message += "cannot locate repository %s owned by %s." % ( str( name ), str( owner ) )
            log.debug( error_message )
            return []
        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
        ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
        return ordered_installable_revisions
    else:
        error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
        error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) )
        log.debug( error_message )
        return []
Example 6: upload
def upload( self, trans, **kwd ):
    message = escape( kwd.get( 'message', '' ) )
    status = kwd.get( 'status', 'done' )
    commit_message = escape( kwd.get( 'commit_message', 'Uploaded' ) )
    category_ids = util.listify( kwd.get( 'category_id', '' ) )
    categories = suc.get_categories( trans.app )
    repository_id = kwd.get( 'repository_id', '' )
    repository = suc.get_repository_in_tool_shed( trans.app, repository_id )
    repo_dir = repository.repo_path( trans.app )
    repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
    uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
    remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
    uploaded_file = None
    upload_point = commit_util.get_upload_point( repository, **kwd )
    tip = repository.tip( trans.app )
    file_data = kwd.get( 'file_data', '' )
    url = kwd.get( 'url', '' )
    # Part of the upload process is sending email notification to those that have registered to
    # receive them. One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new( trans.app )
    uploaded_directory = None
    if kwd.get( 'upload_button', False ):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith( 'hg' ):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            repo_url = 'http%s' % url[ len( 'hg' ): ]
            repo_url = repo_url.encode( 'ascii', 'replace' )
            try:
                commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory )
            except Exception, e:
                message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) )
                status = 'error'
                basic_util.remove_dir( uploaded_directory )
                uploaded_directory = None
        elif url:
            valid_url = True
            try:
                stream = urllib.urlopen( url )
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str( e )
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open( uploaded_file_name, 'wb' )
                while 1:
                    chunk = stream.read( util.CHUNK_SIZE )
                    if not chunk:
                        break
                    uploaded_file.write( chunk )
                uploaded_file.flush()
                uploaded_file_filename = url.split( '/' )[ -1 ]
                isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
Example 7: build_readme_files_dict
def build_readme_files_dict( app, repository, changeset_revision, metadata, tool_path=None ):
    """
    Return a dictionary of valid readme file name <-> readme file content pairs for all readme files defined in the received metadata. Since the
    received changeset_revision (which is associated with the received metadata) may not be the latest installable changeset revision, the README
    file contents may not be available on disk. This method is used by both Galaxy and the Tool Shed.
    """
    if app.name == 'galaxy':
        can_use_disk_files = True
    else:
        repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
        latest_downloadable_changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo )
        can_use_disk_files = changeset_revision == latest_downloadable_changeset_revision
    readme_files_dict = {}
    if metadata:
        if 'readme_files' in metadata:
            for relative_path_to_readme_file in metadata[ 'readme_files' ]:
                readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
                if can_use_disk_files:
                    if tool_path:
                        full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
                    else:
                        full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
                    text = None
                    try:
                        f = open( full_path_to_readme_file, 'r' )
                        text = unicodify( f.read() )
                        f.close()
                    except Exception, e:
                        log.exception( "Error reading README file '%s' from disk: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
                        text = None
                    if text:
                        text_of_reasonable_length = basic_util.size_string( text )
                        if text_of_reasonable_length.find( '.. image:: ' ) >= 0:
                            # Handle image display for README files that are contained in repositories in the tool shed or installed into Galaxy.
                            lock = threading.Lock()
                            lock.acquire( True )
                            try:
                                text_of_reasonable_length = suc.set_image_paths( app,
                                                                                 app.security.encode_id( repository.id ),
                                                                                 text_of_reasonable_length )
                            except Exception, e:
                                log.exception( "Exception in build_readme_files_dict, so images may not be properly displayed:\n%s" % str( e ) )
                            finally:
                                lock.release()
                        if readme_file_name.endswith( '.rst' ):
                            text_of_reasonable_length = Template( rst_to_html( text_of_reasonable_length ),
                                                                  input_encoding='utf-8',
                                                                  output_encoding='utf-8',
                                                                  default_filters=[ 'decode.utf8' ],
                                                                  encoding_errors='replace' )
                            text_of_reasonable_length = text_of_reasonable_length.render( static_path=web.url_for( '/static' ),
                                                                                          host_url=web.url_for( '/', qualified=True ) )
                            text_of_reasonable_length = unicodify( text_of_reasonable_length )
                        else:
                            text_of_reasonable_length = basic_util.to_html_string( text_of_reasonable_length )
                        readme_files_dict[ readme_file_name ] = text_of_reasonable_length
Example 8: get_latest_repository_metadata
def get_latest_repository_metadata(app, decoded_repository_id, downloadable=False):
    """Get last metadata defined for a specified repository from the database."""
    sa_session = app.model.context.current
    repository = sa_session.query(app.model.Repository).get(decoded_repository_id)
    repo = hg_util.get_repo_for_repository(app, repository=repository, repo_path=None, create=False)
    if downloadable:
        changeset_revision = get_latest_downloadable_changeset_revision(app, repository, repo)
    else:
        changeset_revision = get_latest_changeset_revision(app, repository, repo)
    return get_repository_metadata_by_changeset_revision(app, app.security.encode_id(repository.id), changeset_revision)
Example 9: get_repo_info_dict
def get_repo_info_dict( app, user, repository_id, changeset_revision ):
    # Build a repo_info_dict for the received changeset_revision along with flags describing
    # what the revision contains (tools, tool dependencies, repository dependencies).
    repository = suc.get_repository_in_tool_shed( app, repository_id )
    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
    repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
    repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
                                                                             repository_id,
                                                                             changeset_revision )
    if not repository_metadata:
        # The received changeset_revision is no longer installable, so get the next changeset_revision
        # in the repository's changelog. This generally occurs only with repositories of type
        # repository_suite_definition or tool_dependency_definition.
        next_downloadable_changeset_revision = \
            suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
        if next_downloadable_changeset_revision:
            repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
                                                                                     repository_id,
                                                                                     next_downloadable_changeset_revision )
    if repository_metadata:
        # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
        # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
        # returned repo_info_dict.
        metadata = repository_metadata.metadata
        if 'tools' in metadata:
            includes_tools = True
        else:
            includes_tools = False
        includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
        repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
        repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
        has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
            suc.get_repository_dependency_types( repository_dependencies )
        if 'tool_dependencies' in metadata:
            includes_tool_dependencies = True
        else:
            includes_tool_dependencies = False
    else:
        # Here's where we may have to handle enhancements to the callers. See above comment.
        includes_tools = False
        has_repository_dependencies = False
        has_repository_dependencies_only_if_compiling_contained_td = False
        includes_tool_dependencies = False
        includes_tools_for_display_in_tool_panel = False
    ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
    repo_info_dict = create_repo_info_dict( app=app,
                                            repository_clone_url=repository_clone_url,
                                            changeset_revision=changeset_revision,
                                            ctx_rev=str( ctx.rev() ),
                                            repository_owner=repository.user.username,
                                            repository_name=repository.name,
                                            repository=repository,
                                            repository_metadata=repository_metadata,
                                            tool_dependencies=None,
                                            repository_dependencies=None )
    return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
        has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
Example 10: get_dependencies_for_metadata_revision
def get_dependencies_for_metadata_revision( app, metadata ):
    # Return the repository_metadata records for the repository dependencies defined in the received metadata.
    dependencies = []
    for shed, name, owner, changeset, prior, _ in metadata[ 'repository_dependencies' ]:
        required_repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( app, name, owner )
        repo = hg_util.get_repo_for_repository( app, repository=required_repository, repo_path=None, create=False )
        updated_changeset = get_next_downloadable_changeset_revision( required_repository, repo, changeset )
        if updated_changeset is None:
            continue
        metadata_entry = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( required_repository.id ), updated_changeset )
        dependencies.append( metadata_entry )
    return dependencies
Example 11: load_tool_from_changeset_revision
def load_tool_from_changeset_revision( self, repository_id, changeset_revision, tool_config_filename ):
    """
    Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value
    of tool_config_filename. The value of changeset_revision is a valid (downloadable)
    changeset revision. The tool config will be located in the repository manifest between
    the received valid changeset revision and the first changeset revision in the repository,
    searching backwards.
    """
    original_tool_data_path = self.app.config.tool_data_path
    repository = suc.get_repository_in_tool_shed( self.app, repository_id )
    repo_files_dir = repository.repo_path( self.app )
    repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_files_dir, create=False )
    message = ''
    tool = None
    can_use_disk_file = False
    tool_config_filepath = suc.get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
    work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ltfcr" )
    can_use_disk_file = self.can_use_tool_config_disk_file( repository,
                                                            repo,
                                                            tool_config_filepath,
                                                            changeset_revision )
    if can_use_disk_file:
        self.app.config.tool_data_path = work_dir
        tool, valid, message, sample_files = \
            self.handle_sample_files_and_load_tool_from_disk( repo_files_dir,
                                                              repository_id,
                                                              tool_config_filepath,
                                                              work_dir )
        if tool is not None:
            invalid_files_and_errors_tups = \
                self.check_tool_input_params( repo_files_dir,
                                              tool_config_filename,
                                              tool,
                                              sample_files )
            if invalid_files_and_errors_tups:
                message2 = tool_util.generate_message_for_invalid_tools( self.app,
                                                                         invalid_files_and_errors_tups,
                                                                         repository,
                                                                         metadata_dict=None,
                                                                         as_html=True,
                                                                         displaying_invalid_tool=True )
                message = self.concat_messages( message, message2 )
    else:
        tool, message, sample_files = \
            self.handle_sample_files_and_load_tool_from_tmp_config( repo,
                                                                    repository_id,
                                                                    changeset_revision,
                                                                    tool_config_filename,
                                                                    work_dir )
    basic_util.remove_dir( work_dir )
    self.app.config.tool_data_path = original_tool_data_path
    # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
    self.tdtm.reset_tool_data_tables()
    return repository, tool, message
Example 12: get_latest_downloadable_changeset_revision
def get_latest_downloadable_changeset_revision( app, repository, repo=None ):
    # Return the most recent downloadable changeset revision for the repository, opening the
    # mercurial repo lazily if the caller did not supply one.
    if repo is None:
        repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
    repository_tip = repository.tip( app )
    repository_metadata = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), repository_tip )
    if repository_metadata and repository_metadata.downloadable:
        return repository_tip
    changeset_revisions = [ revision[ 1 ] for revision in get_metadata_revisions( repository, repo ) ]
    if changeset_revisions:
        return changeset_revisions[ -1 ]
    return hg_util.INITIAL_CHANGELOG_HASH
Example 13: get_value
def get_value(self, trans, grid, repository):
    # Restrict to revisions that have been reviewed.
    if repository.reviews:
        rval = ''
        repo = hg_util.get_repo_for_repository(trans.app, repository=repository)
        for review in repository.reviews:
            changeset_revision = review.changeset_revision
            rev, label = hg_util.get_rev_label_from_changeset_revision(repo, changeset_revision)
            rval += '<a href="manage_repository_reviews_of_revision?id=%s&changeset_revision=%s">%s</a><br/>' % \
                (trans.security.encode_id(repository.id), changeset_revision, label)
        return rval
    return ''
Example 14: has_previous_repository_reviews
def has_previous_repository_reviews( app, repository, changeset_revision ):
    """
    Determine if a repository has a changeset revision review prior to the
    received changeset revision.
    """
    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
    reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
    for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ):
        previous_changeset_revision = str( repo.changectx( changeset ) )
        if previous_changeset_revision in reviewed_revision_hashes:
            return True
    return False
Example 15: get_certified_level_one_tuple
def get_certified_level_one_tuple( self, repository ):
    """
    Return a tuple of ( latest_installable_changeset_revision, certified ) where certified is True
    if the latest installable changeset_revision of the received repository is level one certified.
    """
    if repository is None:
        return ( None, False )
    if repository.deleted or repository.deprecated:
        return ( None, False )
    repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
    # Get the latest installable changeset revision since that is all that is currently configured for testing.
    latest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( self.app, repository, repo )
    if latest_installable_changeset_revision not in [ None, hg_util.INITIAL_CHANGELOG_HASH ]:
        encoded_repository_id = self.app.security.encode_id( repository.id )
        repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app,
                                                                                 encoded_repository_id,
                                                                                 latest_installable_changeset_revision )
        if repository_metadata:
            # Filter out repository revisions that have not been tested.
            if repository_metadata.time_last_tested is not None and repository_metadata.tool_test_results is not None:
                if repository.type in [ rt_util.REPOSITORY_SUITE_DEFINITION, rt_util.TOOL_DEPENDENCY_DEFINITION ]:
                    # Look in the tool_test_results dictionary for installation errors.
                    try:
                        tool_test_results_dict = repository_metadata.tool_test_results[ 0 ]
                    except Exception, e:
                        message = 'Error attempting to retrieve install and test results for repository %s:\n' % str( repository.name )
                        message += '%s' % str( e )
                        log.exception( message )
                        return ( latest_installable_changeset_revision, False )
                    if 'installation_errors' in tool_test_results_dict:
                        return ( latest_installable_changeset_revision, False )
                    return ( latest_installable_changeset_revision, True )
                else:
                    # We have a repository with type Unrestricted.
                    if repository_metadata.includes_tools:
                        if repository_metadata.tools_functionally_correct:
                            return ( latest_installable_changeset_revision, True )
                        return ( latest_installable_changeset_revision, False )
                    else:
                        # Look in the tool_test_results dictionary for installation errors.
                        try:
                            tool_test_results_dict = repository_metadata.tool_test_results[ 0 ]
                        except Exception, e:
                            message = 'Error attempting to retrieve install and test results for repository %s:\n' % str( repository.name )
                            message += '%s' % str( e )
                            log.exception( message )
                            return ( latest_installable_changeset_revision, False )
                        if 'installation_errors' in tool_test_results_dict:
                            return ( latest_installable_changeset_revision, False )
                        return ( latest_installable_changeset_revision, True )
            else:
                # No test results.
                return ( latest_installable_changeset_revision, False )