1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dbc3d5c3506e/ Changeset: dbc3d5c3506e User: greg Date: 2014-05-28 17:17:50 Summary: Continued down-sizing of the Tool Shed's shed_util_common module by moving certain functions to more appropriate locations. Some fixes for adding and removing entries to the Tool Shed's repository registry. Affected #: 36 files
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -11,6 +11,7 @@ from tool_shed.galaxy_install import repository_util from tool_shed.util import common_util from tool_shed.util import encoding_util +from tool_shed.util import hg_util from tool_shed.util import metadata_util from tool_shed.util import workflow_util from tool_shed.util import tool_util @@ -108,7 +109,7 @@ changeset_revisions = json.from_json_string( raw_text ) if len( changeset_revisions ) >= 1: return changeset_revisions[ -1 ] - return suc.INITIAL_CHANGELOG_HASH + return hg_util.INITIAL_CHANGELOG_HASH
def __get_value_mapper( self, trans, tool_shed_repository ): value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -462,8 +462,8 @@ err_msg = '' tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository # Get the tool_dependencies.xml file from the repository. - tool_dependencies_config = suc.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, - tool_shed_repository.repo_path( trans.app ) ) + tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, + tool_shed_repository.repo_path( trans.app ) ) installed_tool_dependencies = \ common_install_util.install_specified_packages( app=trans.app, tool_shed_repository=tool_shed_repository, @@ -505,7 +505,7 @@ 'repository/get_latest_downloadable_changeset_revision%s' % params ) raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) latest_downloadable_revision = json.from_json_string( raw_text ) - if latest_downloadable_revision == suc.INITIAL_CHANGELOG_HASH: + if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH: message = 'Error retrieving the latest downloadable revision for this repository via the url <b>%s</b>.' % url status = 'error' else:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/tool_shed/api/repositories.py --- a/lib/galaxy/webapps/tool_shed/api/repositories.py +++ b/lib/galaxy/webapps/tool_shed/api/repositories.py @@ -13,6 +13,7 @@ import tool_shed.repository_types.util as rt_util import tool_shed.util.shed_util_common as suc from tool_shed.galaxy_install import repository_util +from tool_shed.util import basic_util from tool_shed.util import encoding_util from tool_shed.util import hg_util from tool_shed.util import import_util @@ -279,7 +280,7 @@ repository_status_info_dict, import_results_tups ) import_util.check_status_and_reset_downloadable( trans, import_results_tups ) - suc.remove_dir( file_path ) + basic_util.remove_dir( file_path ) # NOTE: the order of installation is defined in import_results_tups, but order will be lost # when transferred to return_dict. return_dict = {}
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -15,6 +15,7 @@ from galaxy.util import json from galaxy.model.orm import and_ import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import container_util from tool_shed.util import encoding_util @@ -1146,7 +1147,7 @@ # server account's .hgrc file to include the following setting: # [web] # allow_archive = bz2, gz, zip - file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + file_type_str = export_util.get_file_type_str( changeset_revision, file_type ) repository.times_downloaded += 1 trans.sa_session.add( repository ) trans.sa_session.flush() @@ -1188,7 +1189,7 @@ # Make sure the file is removed from disk after the contents have been downloaded. 
os.unlink( repositories_archive.name ) repositories_archive_path, file_name = os.path.split( repositories_archive.name ) - suc.remove_dir( repositories_archive_path ) + basic_util.remove_dir( repositories_archive_path ) return opened_archive repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision ) metadata = repository_metadata.metadata @@ -1312,7 +1313,10 @@ return self.install_matched_repository_grid( trans, **kwd ) else: kwd[ 'message' ] = "tool id: <b>%s</b><br/>tool name: <b>%s</b><br/>tool version: <b>%s</b><br/>exact matches only: <b>%s</b>" % \ - ( suc.stringify( tool_ids ), suc.stringify( tool_names ), suc.stringify( tool_versions ), str( exact_matches_checked ) ) + ( basic_util.stringify( tool_ids ), + basic_util.stringify( tool_names ), + basic_util.stringify( tool_versions ), + str( exact_matches_checked ) ) self.matched_repository_grid.title = "Repositories with matching tools" return self.matched_repository_grid( trans, **kwd ) else: @@ -1320,9 +1324,9 @@ status = "error" exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked ) return trans.fill_template( '/webapps/tool_shed/repository/find_tools.mako', - tool_id=suc.stringify( tool_ids ), - tool_name=suc.stringify( tool_names ), - tool_version=suc.stringify( tool_versions ), + tool_id=basic_util.stringify( tool_ids ), + tool_name=basic_util.stringify( tool_names ), + tool_version=basic_util.stringify( tool_versions ), exact_matches_check_box=exact_matches_check_box, message=message, status=status ) @@ -1396,7 +1400,7 @@ return self.install_matched_repository_grid( trans, **kwd ) else: kwd[ 'message' ] = "workflow name: <b>%s</b><br/>exact matches only: <b>%s</b>" % \ - ( suc.stringify( workflow_names ), str( exact_matches_checked ) ) + ( basic_util.stringify( workflow_names ), str( exact_matches_checked ) ) self.matched_repository_grid.title = "Repositories with matching workflows" return 
self.matched_repository_grid( trans, **kwd ) else: @@ -1407,7 +1411,7 @@ workflow_names = [] exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked ) return trans.fill_template( '/webapps/tool_shed/repository/find_workflows.mako', - workflow_name=suc.stringify( workflow_names ), + workflow_name=basic_util.stringify( workflow_names ), exact_matches_check_box=exact_matches_check_box, message=message, status=status ) @@ -1666,7 +1670,7 @@ if repository: repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) return suc.get_latest_downloadable_changeset_revision( trans.app, repository, repo ) - return suc.INITIAL_CHANGELOG_HASH + return hg_util.INITIAL_CHANGELOG_HASH
@web.json def get_readme_files( self, trans, **kwd ): @@ -1832,7 +1836,7 @@ #manafest. repo_dir = repository.repo_path( trans.app ) # Get the tool_dependencies.xml file from disk. - tool_dependencies_config = suc.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, repo_dir ) + tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, repo_dir ) # Return the encoded contents of the tool_dependencies.xml file. if tool_dependencies_config: tool_dependencies_config_file = open( tool_dependencies_config, 'rb' ) @@ -1982,7 +1986,7 @@ repository_status_info_dict, import_results_tups ) import_util.check_status_and_reset_downloadable( trans, import_results_tups ) - suc.remove_dir( file_path ) + basic_util.remove_dir( file_path ) return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako', export_info_dict=export_info_dict, import_results_tups=import_results_tups, @@ -2318,7 +2322,7 @@ is_malicious = False skip_tool_test = None repository_dependencies = None - if changeset_revision != suc.INITIAL_CHANGELOG_HASH: + if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision ) if repository_metadata: revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=False ) @@ -2328,8 +2332,8 @@ # There is no repository_metadata defined for the changeset_revision, so see if it was defined in a previous # changeset in the changelog. 
previous_changeset_revision = \ - suc.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=False ) - if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH: + metadata_util.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=False ) + if previous_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, previous_changeset_revision ) if repository_metadata: revision_label = hg_util.get_revision_label( trans, repository, previous_changeset_revision, include_date=False ) @@ -2390,7 +2394,7 @@ changeset_revision, repository_dependencies, repository_metadata ) - heads = suc.get_repository_heads( repo ) + heads = hg_util.get_repository_heads( repo ) deprecated_repository_dependency_tups = \ repository_dependency_util.get_repository_dependency_tups_from_repository_metadata( trans.app, repository_metadata, @@ -2432,7 +2436,7 @@ repository = suc.get_repository_in_tool_shed( trans, id ) changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) ) metadata = None - if changeset_revision != suc.INITIAL_CHANGELOG_HASH: + if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata @@ -2441,11 +2445,11 @@ # in a previous changeset in the changelog. 
repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) previous_changeset_revision = \ - suc.get_previous_metadata_changeset_revision( repository, - repo, - changeset_revision, - downloadable=False ) - if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH: + metadata_util.get_previous_metadata_changeset_revision( repository, + repo, + changeset_revision, + downloadable=False ) + if previous_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, previous_changeset_revision ) @@ -2602,10 +2606,15 @@ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) # Get the lower bound changeset revision. - lower_bound_changeset_revision = suc.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=True ) + lower_bound_changeset_revision = metadata_util.get_previous_metadata_changeset_revision( repository, + repo, + changeset_revision, + downloadable=True ) # Build the list of changeset revision hashes. 
changeset_hashes = [] - for changeset in suc.reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ): + for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo, + lower_bound_changeset_revision, + changeset_revision ): changeset_hashes.append( str( repo.changectx( changeset ) ) ) if changeset_hashes: changeset_hashes_str = ','.join( changeset_hashes ) @@ -3117,7 +3126,7 @@ for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node(), opts=diffopts ): if len( diff ) > suc.MAXDIFFSIZE: diff = util.shrink_string_by_size( diff, suc.MAXDIFFSIZE ) - diffs.append( suc.to_html_string( diff ) ) + diffs.append( basic_util.to_html_string( diff ) ) modified, added, removed, deleted, unknown, ignored, clean = repo.status( node1=ctx_parent.node(), node2=ctx.node() ) anchors = modified + added + removed + deleted + unknown + ignored + clean metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, id, ctx_str, metadata_only=True ) @@ -3236,7 +3245,7 @@ status = 'warning' else: metadata = None - is_malicious = suc.changeset_is_malicious( trans.app, id, repository.tip( trans.app ) ) + is_malicious = metadata_util.is_malicious( trans.app, id, repository.tip( trans.app ) ) if is_malicious: if trans.app.security_agent.can_push( trans.app, trans.user, repository ): message += malicious_error_can_push @@ -3249,7 +3258,7 @@ repository_dependencies, repository_metadata ) repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository ) - heads = suc.get_repository_heads( repo ) + heads = hg_util.get_repository_heads( repo ) return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako', repo=repo, heads=heads, @@ -3320,7 +3329,7 @@ work_dir ) if message: status = 'error' - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) break if guid: tool_lineage = tool_util.get_version_lineage_for_tool( trans, repository_id, 
repository_metadata, guid )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/tool_shed/controllers/repository_review.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository_review.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py @@ -470,7 +470,7 @@ metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] reviews_dict = odict() - for changeset in suc.get_reversed_changelog_changesets( repo ): + for changeset in hg_util.get_reversed_changelog_changesets( repo ): ctx = repo.changectx( changeset ) changeset_revision = str( ctx ) if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -10,6 +10,7 @@ from galaxy.datatypes import checkers import tool_shed.repository_types.util as rt_util import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util from tool_shed.util import commit_util from tool_shed.util import hg_util from tool_shed.util import metadata_util @@ -64,9 +65,9 @@ try: commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory ) except Exception, e: - message = 'Error uploading via mercurial clone: %s' % suc.to_html_string( str( e ) ) + message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) ) status = 'error' - suc.remove_dir( uploaded_directory ) + basic_util.remove_dir( uploaded_directory ) uploaded_directory = None elif url: valid_url = True @@ -296,7 +297,7 @@ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tool_util.reset_tool_data_tables( trans.app ) if uploaded_directory: - suc.remove_dir( uploaded_directory ) + basic_util.remove_dir( uploaded_directory ) trans.response.send_redirect( web.url_for( controller='repository', action='browse_repository', id=repository_id, @@ -305,7 +306,7 @@ status=status ) ) else: if uploaded_directory: - suc.remove_dir( uploaded_directory ) + basic_util.remove_dir( uploaded_directory ) status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tool_util.reset_tool_data_tables( trans.app )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -9,6 +9,7 @@ from galaxy import web from galaxy.model.orm import or_ import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import common_install_util from tool_shed.util import container_util @@ -448,7 +449,7 @@ files_dir = relative_install_dir if shed_config_dict.get( 'tool_path' ): files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir ) - datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir ) + datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir ) # Load data types required by tools. converter_path, display_path = \ datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False ) @@ -620,14 +621,14 @@ tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. 
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir ) + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', install_dir ) installed_tool_dependencies = \ common_install_util.install_specified_packages( app=trans.app, tool_shed_repository=tool_shed_repository, tool_dependencies_config=tool_dependencies_config, tool_dependencies=tool_shed_repository.tool_dependencies, from_tool_migration_manager=False ) - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.INSTALLED ) @@ -878,7 +879,7 @@ repository, trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. - tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( trans.app ) ) + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( trans.app ) ) installed_tool_dependencies = \ common_install_util.install_specified_packages( app=trans.app, tool_shed_repository=repository, @@ -888,7 +889,7 @@ for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]: repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message ) - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) suc.update_tool_shed_repository_status( trans.app, repository, trans.install_model.ToolShedRepository.installation_status.INSTALLED ) return repair_dict
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py @@ -405,7 +405,7 @@ package_name=package_name, package_version=package_version, tool_dependencies_config=config_to_use ) - suc.remove_file( tmp_filename ) + self.remove_file( tmp_filename ) else: message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \ ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) ) @@ -429,6 +429,15 @@ print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message ) return tool_dependency, proceed_with_install, action_elem_tuples
+ def remove_file( self, file_name ): + """Attempt to remove a file from disk.""" + if file_name: + if os.path.exists( file_name ): + try: + os.remove( file_name ) + except: + pass +
class SetEnvironment( RecipeTag ):
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/galaxy_install/tool_migration_manager.py --- a/lib/tool_shed/galaxy_install/tool_migration_manager.py +++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py @@ -11,6 +11,7 @@ from galaxy import util from galaxy.tools import ToolSection import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util from tool_shed.util import common_install_util from tool_shed.util import common_util from tool_shed.util import datatype_util @@ -251,13 +252,13 @@ def get_guid( self, repository_clone_url, relative_install_dir, tool_config ): if self.shed_config_dict.get( 'tool_path' ): relative_install_dir = os.path.join( self.shed_config_dict[ 'tool_path' ], relative_install_dir ) - tool_config_filename = suc.strip_path( tool_config ) + tool_config_filename = basic_util.strip_path( tool_config ) for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: if '.hg' in dirs: dirs.remove( '.hg' ) for name in files: - filename = suc.strip_path( name ) + filename = basic_util.strip_path( name ) if filename == tool_config_filename: full_path = str( os.path.abspath( os.path.join( root, name ) ) ) tool = self.toolbox.load_tool( full_path ) @@ -444,7 +445,7 @@ tool_shed_repository, self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from disk. 
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) installed_tool_dependencies = \ common_install_util.install_specified_packages( app=self.app, tool_shed_repository=tool_shed_repository, @@ -462,7 +463,7 @@ self.app.install_model.context.add( tool_shed_repository ) self.app.install_model.context.flush() work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" ) - datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir ) + datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir ) # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started # after this installation completes. converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir @@ -481,7 +482,7 @@ if display_path: # Load proprietary datatype display applications self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir )
def install_repository( self, repository_elem, tool_shed_repository, install_dependencies, is_repository_dependency=False ): """Install a single repository, loading contained tools into the tool panel."""
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/grids/repository_grids.py --- a/lib/tool_shed/grids/repository_grids.py +++ b/lib/tool_shed/grids/repository_grids.py @@ -98,7 +98,7 @@ def get_value( self, trans, grid, repository ): """Display the current repository heads.""" repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) - heads = suc.get_repository_heads( repo ) + heads = hg_util.get_repository_heads( repo ) multiple_heads = len( heads ) > 1 if multiple_heads: heads_str = '<font color="red">'
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/grids/util.py --- a/lib/tool_shed/grids/util.py +++ b/lib/tool_shed/grids/util.py @@ -5,6 +5,7 @@ from galaxy.web.form_builder import SelectField
from tool_shed.util import hg_util +from tool_shed.util import metadata_util from tool_shed.util import shed_util_common as suc
log = logging.getLogger( __name__ ) @@ -178,8 +179,11 @@ return repository_metadata return None except: - latest_downloadable_revision = suc.get_previous_metadata_changeset_revision( repository, repo, tip_ctx, downloadable=True ) - if latest_downloadable_revision == suc.INITIAL_CHANGELOG_HASH: + latest_downloadable_revision = metadata_util.get_previous_metadata_changeset_revision( repository, + repo, + tip_ctx, + downloadable=True ) + if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH: return None repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id, @@ -225,8 +229,11 @@ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id, tip_ctx ) return repository_metadata except: - latest_downloadable_revision = suc.get_previous_metadata_changeset_revision( repository, repo, tip_ctx, downloadable=False ) - if latest_downloadable_revision == suc.INITIAL_CHANGELOG_HASH: + latest_downloadable_revision = metadata_util.get_previous_metadata_changeset_revision( repository, + repo, + tip_ctx, + downloadable=False ) + if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH: return None repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id,
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/repository_registry.py --- a/lib/tool_shed/repository_registry.py +++ b/lib/tool_shed/repository_registry.py @@ -50,17 +50,35 @@ for rca in repository.categories: category = rca.category category_name = str( category.name ) - self.viewable_repositories_and_suites_by_category[ category_name ] += 1 + if category_name in self.viewable_repositories_and_suites_by_category: + self.viewable_repositories_and_suites_by_category[ category_name ] += 1 + else: + self.viewable_repositories_and_suites_by_category[ category_name ] = 1 if is_valid: - self.viewable_valid_repositories_and_suites_by_category[ category_name ] += 1 + if category_name in self.viewable_valid_repositories_and_suites_by_category: + self.viewable_valid_repositories_and_suites_by_category[ category_name ] += 1 + else: + self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 1 if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: - self.viewable_suites_by_category[ category_name ] += 1 + if category_name in self.viewable_suites_by_category: + self.viewable_suites_by_category[ category_name ] += 1 + else: + self.viewable_suites_by_category[ category_name ] = 1 if is_valid: - self.viewable_valid_suites_by_category[ category_name ] += 1 + if category_name in self.viewable_valid_suites_by_category: + self.viewable_valid_suites_by_category[ category_name ] += 1 + else: + self.viewable_valid_suites_by_category[ category_name ] = 1 if is_level_one_certified: - self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] += 1 + if category_name in self.certified_level_one_viewable_repositories_and_suites_by_category: + self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] += 1 + else: + self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 1 if repository.type == 
rt_util.REPOSITORY_SUITE_DEFINITION: - self.certified_level_one_viewable_suites_by_category[ category_name ] += 1 + if category_name in self.certified_level_one_viewable_suites_by_category: + self.certified_level_one_viewable_suites_by_category[ category_name ] += 1 + else: + self.certified_level_one_viewable_suites_by_category[ category_name ] = 1 self.load_repository_and_suite_tuple( repository ) if is_level_one_certified: self.load_certified_level_one_repository_and_suite_tuple( repository ) @@ -97,7 +115,7 @@ repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False ) # Get the latest installable changeset revision since that is all that is currently configured for testing. latest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( self.app, repository, repo ) - if latest_installable_changeset_revision not in [ None, suc.INITIAL_CHANGELOG_HASH ]: + if latest_installable_changeset_revision not in [ None, hg_util.INITIAL_CHANGELOG_HASH ]: encoded_repository_id = self.app.security.encode_id( repository.id ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app, encoded_repository_id, @@ -168,7 +186,7 @@ name = str( repository.name ) owner = str( repository.user.username ) tip_changeset_hash = repository.tip( self.app ) - if tip_changeset_hash != suc.INITIAL_CHANGELOG_HASH: + if tip_changeset_hash != hg_util.INITIAL_CHANGELOG_HASH: certified_level_one_tuple = ( name, owner, tip_changeset_hash ) if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: if certified_level_one_tuple not in self.certified_level_one_suite_tuples: @@ -257,17 +275,41 @@ for rca in repository.categories: category = rca.category category_name = str( category.name ) - self.viewable_repositories_and_suites_by_category[ category_name ] -= 1 + if category_name in self.viewable_repositories_and_suites_by_category: + if self.viewable_repositories_and_suites_by_category[ category_name ] > 0: + 
self.viewable_repositories_and_suites_by_category[ category_name ] -= 1 + else: + self.viewable_repositories_and_suites_by_category[ category_name ] = 0 if is_valid: - self.viewable_valid_repositories_and_suites_by_category[ category_name ] -= 1 + if category_name in self.viewable_valid_repositories_and_suites_by_category: + if self.viewable_valid_repositories_and_suites_by_category[ category_name ] > 0: + self.viewable_valid_repositories_and_suites_by_category[ category_name ] -= 1 + else: + self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 0 if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: - self.viewable_suites_by_category[ category_name ] -= 1 + if category_name in self.viewable_suites_by_category: + if self.viewable_suites_by_category[ category_name ] > 0: + self.viewable_suites_by_category[ category_name ] -= 1 + else: + self.viewable_suites_by_category[ category_name ] = 0 if is_valid: - self.viewable_valid_suites_by_category[ category_name ] -= 1 + if category_name in self.viewable_valid_suites_by_category: + if self.viewable_valid_suites_by_category[ category_name ] > 0: + self.viewable_valid_suites_by_category[ category_name ] -= 1 + else: + self.viewable_valid_suites_by_category[ category_name ] = 0 if is_level_one_certified: - self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] -= 1 + if category_name in self.certified_level_one_viewable_repositories_and_suites_by_category: + if self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] > 0: + self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] -= 1 + else: + self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 0 if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: - self.certified_level_one_viewable_suites_by_category[ category_name ] -= 1 + if category_name in self.certified_level_one_viewable_suites_by_category: + if 
self.certified_level_one_viewable_suites_by_category[ category_name ] > 0: + self.certified_level_one_viewable_suites_by_category[ category_name ] -= 1 + else: + self.certified_level_one_viewable_suites_by_category[ category_name ] = 0 self.unload_repository_and_suite_tuple( repository ) if is_level_one_certified: self.unload_certified_level_one_repository_and_suite_tuple( repository ) @@ -286,7 +328,7 @@ name = str( repository.name ) owner = str( repository.user.username ) tip_changeset_hash = repository.tip( self.app ) - if tip_changeset_hash != suc.INITIAL_CHANGELOG_HASH: + if tip_changeset_hash != hg_util.INITIAL_CHANGELOG_HASH: certified_level_one_tuple = ( name, owner, tip_changeset_hash ) if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: if certified_level_one_tuple in self.certified_level_one_suite_tuples:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/repository_types/repository_suite_definition.py --- a/lib/tool_shed/repository_types/repository_suite_definition.py +++ b/lib/tool_shed/repository_types/repository_suite_definition.py @@ -1,7 +1,7 @@ import logging from tool_shed.repository_types.metadata import TipOnly import tool_shed.repository_types.util as rt_util -import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util
from galaxy import eggs eggs.require( 'mercurial' ) @@ -35,7 +35,7 @@ # is named repository_dependencies.xml. files_changed_in_changeset = ctx.files() for file_path in files_changed_in_changeset: - file_name = suc.strip_path( file_path ) + file_name = basic_util.strip_path( file_path ) if file_name not in self.valid_file_names: return False return True
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/repository_types/tool_dependency_definition.py --- a/lib/tool_shed/repository_types/tool_dependency_definition.py +++ b/lib/tool_shed/repository_types/tool_dependency_definition.py @@ -1,7 +1,7 @@ import logging from tool_shed.repository_types.metadata import TipOnly import tool_shed.repository_types.util as rt_util -import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util
from galaxy import eggs eggs.require( 'mercurial' ) @@ -34,7 +34,7 @@ # Inspect all files in the changeset (in sorted order) to make sure there is only one and it is named tool_dependencies.xml. files_changed_in_changeset = ctx.files() for file_path in files_changed_in_changeset: - file_name = suc.strip_path( file_path ) + file_name = basic_util.strip_path( file_path ) if file_name not in self.valid_file_names: return False return True
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -8,11 +8,8 @@ new_path.extend( sys.path[ 1: ] ) sys.path = new_path
-import tool_shed.util.shed_util_common as suc from tool_shed.util import common_util - -from galaxy import eggs -import pkg_resources +from tool_shed.util import hg_util
def delete( api_key, url, data, return_formatted=True ): """ @@ -115,7 +112,7 @@ return None, error_message if len( changeset_revisions ) >= 1: return changeset_revisions[ -1 ], error_message - return suc.INITIAL_CHANGELOG_HASH, error_message + return hg_util.INITIAL_CHANGELOG_HASH, error_message
def get_repository_dict( url, repository_dict ): """
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/scripts/api/export.py --- a/lib/tool_shed/scripts/api/export.py +++ b/lib/tool_shed/scripts/api/export.py @@ -17,17 +17,6 @@
CHUNK_SIZE = 2**20 # 1Mb
-def get_file_type_str( changeset_revision, file_type ): - if file_type == 'zip': - file_type_str = '%s.zip' % changeset_revision - elif file_type == 'bz2': - file_type_str = '%s.tar.bz2' % changeset_revision - elif file_type == 'gz': - file_type_str = '%s.tar.gz' % changeset_revision - else: - file_type_str = '' - return file_type_str - def string_as_bool( string ): if str( string ).lower() in ( 'true', 'yes', 'on' ): return True
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/scripts/api/get_filtered_repository_revisions.py --- a/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py +++ b/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py @@ -34,7 +34,7 @@
from galaxy.util import asbool from galaxy.util.json import from_json_string -import tool_shed.util.shed_util_common as suc +from tool_shed.util import hg_util
def main( options ): base_tool_shed_url = options.tool_shed_url.rstrip( '/' ) @@ -71,12 +71,12 @@ repository_dicts.append( baseline_repository_dict ) else: # Don't test empty repositories. - changeset_revision = baseline_repository_dict.get( 'changeset_revision', suc.INITIAL_CHANGELOG_HASH ) - if changeset_revision != suc.INITIAL_CHANGELOG_HASH: + changeset_revision = baseline_repository_dict.get( 'changeset_revision', hg_util.INITIAL_CHANGELOG_HASH ) + if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and # append it to the list of repository_dicts to install and test. if latest_revision_only: - latest_revision = repository_dict.get( 'latest_revision', suc.INITIAL_CHANGELOG_HASH ) + latest_revision = repository_dict.get( 'latest_revision', hg_util.INITIAL_CHANGELOG_HASH ) if changeset_revision == latest_revision: repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) ) else:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/basic_util.py
--- /dev/null
+++ b/lib/tool_shed/util/basic_util.py
@@ -0,0 +1,59 @@
+import logging
+import os
+import shutil
+
+from galaxy import util
+from galaxy.util import unicodify
+
+from galaxy import eggs
+
+eggs.require( 'markupsafe' )
+import markupsafe
+
+log = logging.getLogger( __name__ )
+
+MAX_DISPLAY_SIZE = 32768
+
+def remove_dir( dir ):
+    """Attempt to remove a directory from disk."""
+    if dir:
+        if os.path.exists( dir ):
+            try:
+                shutil.rmtree( dir )
+            except:
+                pass
+
+def size_string( raw_text, size=MAX_DISPLAY_SIZE ):
+    """Return a subset of a string (up to MAX_DISPLAY_SIZE) translated to a safe string for display in a browser."""
+    if raw_text and len( raw_text ) >= size:
+        large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( size )
+        raw_text = '%s%s' % ( raw_text[ 0:size ], large_str )
+    return raw_text or ''
+
+def stringify( list ):
+    if list:
+        return ','.join( list )
+    return ''
+
+def strip_path( fpath ):
+    """Attempt to strip the path from a file name."""
+    if not fpath:
+        return fpath
+    try:
+        file_path, file_name = os.path.split( fpath )
+    except:
+        file_name = fpath
+    return file_name
+
+def to_html_string( text ):
+    """Translates the characters in text to an html string"""
+    if text:
+        try:
+            text = unicodify( text )
+        except UnicodeDecodeError, e:
+            return "Error decoding string: %s" % str( e )
+        text = unicode( markupsafe.escape( text ) )
+        text = text.replace( '\n', '<br/>' )
+        text = text.replace( '    ', '&nbsp;&nbsp;&nbsp;&nbsp;' )
+        text = text.replace( ' ', '&nbsp;' )
+    return text
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -345,7 +345,7 @@ if repository: repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans.app, repository, repo ) - if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH: + if lastest_installable_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision revised = True else:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/datatype_util.py --- a/lib/tool_shed/util/datatype_util.py +++ b/lib/tool_shed/util/datatype_util.py @@ -3,6 +3,8 @@ import tempfile from galaxy import eggs from galaxy.util import asbool +from tool_shed.util import basic_util +from tool_shed.util import hg_util from tool_shed.util import tool_util from tool_shed.util import xml_util import tool_shed.util.shed_util_common as suc @@ -127,7 +129,7 @@ for converter in elem.findall( 'converter' ): converter_config = converter.get( 'file', None ) if converter_config: - converter_config_file_name = suc.strip_path( converter_config ) + converter_config_file_name = basic_util.strip_path( converter_config ) for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: for name in files: @@ -144,7 +146,7 @@ for display_app in elem.findall( 'display' ): display_config = display_app.get( 'file', None ) if display_config: - display_config_file_name = suc.strip_path( display_config ) + display_config_file_name = basic_util.strip_path( display_config ) for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: for name in files: @@ -166,7 +168,7 @@ # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later. metadata = repository.metadata repository_dict = None - datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir ) + datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir ) if datatypes_config: converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate ) if converter_path or display_path:
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -11,6 +11,7 @@ from galaxy import eggs from galaxy import web from galaxy.util.odict import odict +from tool_shed.util import basic_util from tool_shed.util import commit_util from tool_shed.util import common_install_util from tool_shed.util import common_util @@ -101,7 +102,7 @@ attributes, sub_elements = get_repository_attributes_and_sub_elements( ordered_repository, archive_name ) elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements ) exported_repository_registry.exported_repository_elems.append( elem ) - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) # Keep information about the export in a file name export_info.xml in the archive. sub_elements = generate_export_elem( tool_shed_url, repository, changeset_revision, export_repository_dependencies, api ) export_elem = xml_util.create_element( 'export_info', attributes=None, sub_elements=sub_elements ) @@ -127,7 +128,7 @@ return repositories_archive, error_messages
def generate_repository_archive( trans, work_dir, tool_shed_url, repository, changeset_revision, file_type ): - file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + file_type_str = get_file_type_str( changeset_revision, file_type ) file_name = '%s-%s' % ( repository.name, file_type_str ) return_code, error_message = archive_repository_revision( trans, ui, repository, work_dir, changeset_revision ) if return_code: @@ -172,7 +173,7 @@ def generate_repository_archive_filename( tool_shed_url, name, owner, changeset_revision, file_type, export_repository_dependencies=False, use_tmp_archive_dir=False ): tool_shed = remove_protocol_from_tool_shed_url( tool_shed_url ) - file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + file_type_str = get_file_type_str( changeset_revision, file_type ) if export_repository_dependencies: repositories_archive_filename = '%s_%s_%s_%s_%s' % ( CAPSULE_WITH_DEPENDENCIES_FILENAME, tool_shed, name, owner, file_type_str ) else: @@ -210,6 +211,17 @@ return repository, repository_metadata.changeset_revision return None, None
+def get_file_type_str( changeset_revision, file_type ): + if file_type == 'zip': + file_type_str = '%s.zip' % changeset_revision + elif file_type == 'bz2': + file_type_str = '%s.tar.bz2' % changeset_revision + elif file_type == 'gz': + file_type_str = '%s.tar.gz' % changeset_revision + else: + file_type_str = '' + return file_type_str + def get_repo_info_dict_for_import( encoded_repository_id, encoded_repository_ids, repo_info_dicts ): """ The received encoded_repository_ids and repo_info_dicts are lists that contain associated elements at each
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/hg_util.py
--- a/lib/tool_shed/util/hg_util.py
+++ b/lib/tool_shed/util/hg_util.py
@@ -1,4 +1,6 @@
 import logging
+import os
+import tempfile
 from datetime import datetime
 from time import gmtime
 from time import strftime
@@ -12,8 +14,12 @@
 from mercurial import hg
 from mercurial import ui
+from tool_shed.util import basic_util + log = logging.getLogger( __name__ )
+INITIAL_CHANGELOG_HASH = '000000000000' + def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ): """ Clone the repository up to the specified changeset_revision. No subsequent revisions will be @@ -32,6 +38,22 @@ log.debug( error_message ) return False, error_message
+def copy_file_from_manifest( repo, ctx, filename, dir ): + """ + Copy the latest version of the file named filename from the repository manifest to the directory + to which dir refers. + """ + for changeset in reversed_upper_bounded_changelog( repo, ctx ): + changeset_ctx = repo.changectx( changeset ) + fctx = get_file_context_from_ctx( changeset_ctx, filename ) + if fctx and fctx not in [ 'DELETED' ]: + file_path = os.path.join( dir, filename ) + fh = open( file_path, 'wb' ) + fh.write( fctx.data() ) + fh.close() + return file_path + return None + def get_changectx_for_changeset( repo, changeset_revision, **kwd ): """Retrieve a specified changectx from a repository.""" for changeset in repo.changelog: @@ -40,6 +62,25 @@ return ctx return None
+def get_config( config_file, repo, ctx, dir ): + """Return the latest version of config_filename from the repository manifest.""" + config_file = basic_util.strip_path( config_file ) + for changeset in reversed_upper_bounded_changelog( repo, ctx ): + changeset_ctx = repo.changectx( changeset ) + for ctx_file in changeset_ctx.files(): + ctx_file_name = basic_util.strip_path( ctx_file ) + if ctx_file_name == config_file: + return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir ) + return None + +def get_config_from_disk( config_file, relative_install_dir ): + for root, dirs, files in os.walk( relative_install_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == config_file: + return os.path.abspath( os.path.join( root, name ) ) + return None + def get_configured_ui(): """Configure any desired ui settings.""" _ui = ui.ui() @@ -50,6 +91,44 @@ _ui.setconfig( 'ui', 'quiet', True ) return _ui
+def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ): + """ + Get the ctx file path for the latest revision of filename from the repository manifest up + to the value of changeset_revision. + """ + stripped_filename = basic_util.strip_path( filename ) + for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + manifest_changeset_revision = str( repo.changectx( changeset ) ) + manifest_ctx = repo.changectx( changeset ) + for ctx_file in manifest_ctx.files(): + ctx_file_name = basic_util.strip_path( ctx_file ) + if ctx_file_name == stripped_filename: + return manifest_ctx, ctx_file + return None, None + +def get_file_context_from_ctx( ctx, filename ): + """Return the mercurial file context for a specified file.""" + # We have to be careful in determining if we found the correct file because multiple files with + # the same name may be in different directories within ctx if the files were moved within the change + # set. For example, in the following ctx.files() list, the former may have been moved to the latter: + # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. + # Another scenario is that the file has been deleted. + deleted = False + filename = basic_util.strip_path( filename ) + for ctx_file in ctx.files(): + ctx_file_name = basic_util.strip_path( ctx_file ) + if filename == ctx_file_name: + try: + # If the file was moved, its destination will be returned here. + fctx = ctx[ ctx_file ] + return fctx + except LookupError, e: + # Set deleted for now, and continue looking in case the file was moved instead of deleted. + deleted = True + if deleted: + return 'DELETED' + return None + def get_mercurial_default_options_dict( command, command_table=None, **kwd ): '''Borrowed from repoman - get default parameters for a mercurial command.''' if command_table is None: @@ -62,6 +141,32 @@ default_options_dict[ option ] = kwd[ option ] return default_options_dict
+def get_named_tmpfile_from_ctx( ctx, filename, dir ): + """ + Return a named temporary file created from a specified file with a given name included in a repository + changeset revision. + """ + filename = basic_util.strip_path( filename ) + for ctx_file in ctx.files(): + ctx_file_name = basic_util.strip_path( ctx_file ) + if filename == ctx_file_name: + try: + # If the file was moved, its destination file contents will be returned here. + fctx = ctx[ ctx_file ] + except LookupError, e: + # Continue looking in case the file was moved. + fctx = None + continue + if fctx: + fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gntfc", dir=dir ) + tmp_filename = fh.name + fh.close() + fh = open( tmp_filename, 'wb' ) + fh.write( fctx.data() ) + fh.close() + return tmp_filename + return None + def get_readable_ctx_date( ctx ): """Convert the date of the changeset (the received ctx) to a human-readable date.""" t, tz = ctx.date() @@ -75,6 +180,18 @@ if repo_path is not None: return hg.repository( get_configured_ui(), repo_path, create=create )
+def get_repository_heads( repo ): + """Return current repository heads, which are changesets with no child changesets.""" + heads = [ repo[ h ] for h in repo.heads( None ) ] + return heads + +def get_reversed_changelog_changesets( repo ): + """Return a list of changesets in reverse order from that provided by the repository manifest.""" + reversed_changelog = [] + for changeset in repo.changelog: + reversed_changelog.insert( 0, changeset ) + return reversed_changelog + def get_revision_label( trans, repository, changeset_revision, include_date=True, include_hash=True ): """ Return a string consisting of the human read-able changeset rev and the changeset revision string @@ -146,6 +263,38 @@ label = "-1:%s" % changeset_revision return rev, label
+def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ): + """ + Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, + but up to and including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision + will be the value of INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision. + """ + # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value + # of changeset_revision is a downloadable changeset_revision. + # excluded_lower_bounds_changeset_revision = \ + # metadata_util.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=? ) + if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH: + appending_started = True + else: + appending_started = False + reversed_changelog = [] + for changeset in repo.changelog: + changeset_hash = str( repo.changectx( changeset ) ) + if appending_started: + reversed_changelog.insert( 0, changeset ) + if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started: + appending_started = True + if changeset_hash == included_upper_bounds_changeset_revision: + break + return reversed_changelog + +def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ): + """ + Return a reversed list of changesets in the repository changelog up to and including the + included_upper_bounds_changeset_revision. + """ + return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision ) + def update_repository( repo, ctx_rev=None ): """ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -13,6 +13,7 @@
import tool_shed.util.shed_util_common as suc from tool_shed.repository_types.metadata import TipOnly +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import common_install_util from tool_shed.util import container_util @@ -526,7 +527,7 @@ tool_config = sub_elem.attrib[ 'file' ] target_datatype = sub_elem.attrib[ 'target_datatype' ] # Parse the tool_config to get the guid. - tool_config_path = suc.get_config_from_disk( tool_config, repository_files_dir ) + tool_config_path = hg_util.get_config_from_disk( tool_config, repository_files_dir ) full_path = os.path.abspath( tool_config_path ) tool, valid, error_message = tool_util.load_tool_from_config( app, app.security.encode_id( repository.id ), full_path ) if tool is None: @@ -632,7 +633,7 @@ app.config.tool_data_path = work_dir #FIXME: Thread safe? app.config.tool_data_table_config_path = work_dir # Handle proprietary datatypes, if any. - datatypes_config = suc.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir ) + datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir ) if datatypes_config: metadata_dict = generate_datatypes_metadata( app, repository, repository_clone_url, files_dir, datatypes_config, metadata_dict ) # Get the relative path to all sample files included in the repository for storage in the repository's metadata. @@ -749,14 +750,14 @@ metadata_dict = generate_data_manager_metadata( app, repository, files_dir, - suc.get_config_from_disk( suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, files_dir ), + hg_util.get_config_from_disk( suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, files_dir ), metadata_dict, shed_config_dict=shed_config_dict )
if readme_files: metadata_dict[ 'readme_files' ] = readme_files # This step must be done after metadata for tools has been defined. - tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', files_dir ) + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', files_dir ) if tool_dependencies_config: metadata_dict, error_message = generate_tool_dependency_metadata( app, repository, @@ -772,7 +773,7 @@ # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values. app.config.tool_data_path = original_tool_data_path app.config.tool_data_table_config_path = original_tool_data_table_config_path - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) return metadata_dict, invalid_file_tups
def generate_package_dependency_metadata( app, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ): @@ -1006,7 +1007,7 @@ outputs = [] for output in ttb.outputs: name, file_name, extra = output - outputs.append( ( name, suc.strip_path( file_name ) if file_name else None ) ) + outputs.append( ( name, basic_util.strip_path( file_name ) if file_name else None ) ) if file_name not in required_files and file_name is not None: required_files.append( file_name ) test_dict = dict( name=str( ttb.name ), @@ -1078,6 +1079,29 @@ # The tool did not change through all of the changeset revisions. return old_id
+def get_previous_metadata_changeset_revision( repository, repo, before_changeset_revision, downloadable=True ): + """ + Return the changeset_revision in the repository changelog that has associated metadata prior to + the changeset to which before_changeset_revision refers. If there isn't one, return the hash value + of an empty repository changelog, hg_util.INITIAL_CHANGELOG_HASH. + """ + changeset_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=downloadable ) + if len( changeset_revisions ) == 1: + changeset_revision = changeset_revisions[ 0 ] + if changeset_revision == before_changeset_revision: + return hg_util.INITIAL_CHANGELOG_HASH + return changeset_revision + previous_changeset_revision = None + for changeset_revision in changeset_revisions: + if changeset_revision == before_changeset_revision: + if previous_changeset_revision: + return previous_changeset_revision + else: + # Return the hash value of an empty repository changelog - note that this will not be a valid changeset revision. + return hg_util.INITIAL_CHANGELOG_HASH + else: + previous_changeset_revision = changeset_revision + def get_relative_path_to_repository_file( root, name, relative_install_dir, work_dir, shed_config_dict, resetting_all_metadata_on_repository ): if resetting_all_metadata_on_repository: full_path_to_file = os.path.join( root, name ) @@ -1344,6 +1368,13 @@ return True return False
+def is_malicious( app, id, changeset_revision, **kwd ): + """Check the malicious flag in repository metadata for a specified change set revision.""" + repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, id, changeset_revision ) + if repository_metadata: + return repository_metadata.malicious + return False + def new_datatypes_metadata_required( trans, repository_metadata, metadata_dict ): """ Compare the last saved metadata for each datatype in the repository with the new metadata in metadata_dict to determine if a new @@ -1821,7 +1852,7 @@ changeset_revisions.append( metadata_changeset_revision ) ancestor_changeset_revision = None ancestor_metadata_dict = None - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) # Delete all repository_metadata records for this repository that do not have a changeset_revision # value in changeset_revisions. clean_repository_metadata( trans, id, changeset_revisions )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/readme_util.py --- a/lib/tool_shed/util/readme_util.py +++ b/lib/tool_shed/util/readme_util.py @@ -10,6 +10,7 @@ from galaxy.util import unicodify
import tool_shed.util.shed_util_common as suc +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import hg_util
@@ -46,7 +47,7 @@ log.exception( "Error reading README file '%s' from disk: %s" % ( str( relative_path_to_readme_file ), str( e ) ) ) text = None if text: - text_of_reasonable_length = suc.size_string( text ) + text_of_reasonable_length = basic_util.size_string( text ) if text_of_reasonable_length.find( '.. image:: ' ) >= 0: # Handle image display for README files that are contained in repositories in the tool shed or installed into Galaxy. lock = threading.Lock() @@ -69,17 +70,17 @@ host_url=web.url_for( '/', qualified=True ) ) text_of_reasonable_length = unicodify( text_of_reasonable_length ) else: - text_of_reasonable_length = suc.to_html_string( text_of_reasonable_length ) + text_of_reasonable_length = basic_util.to_html_string( text_of_reasonable_length ) readme_files_dict[ readme_file_name ] = text_of_reasonable_length else: # We must be in the tool shed and have an old changeset_revision, so we need to retrieve the file contents from the repository manifest. ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision ) if ctx: - fctx = suc.get_file_context_from_ctx( ctx, readme_file_name ) + fctx = hg_util.get_file_context_from_ctx( ctx, readme_file_name ) if fctx and fctx not in [ 'DELETED' ]: try: text = unicodify( fctx.data() ) - readme_files_dict[ readme_file_name ] = suc.size_string( text ) + readme_files_dict[ readme_file_name ] = basic_util.size_string( text ) except Exception, e: log.exception( "Error reading README file '%s' from repository manifest: %s" % \ ( str( relative_path_to_readme_file ), str( e ) ) )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/review_util.py --- a/lib/tool_shed/util/review_util.py +++ b/lib/tool_shed/util/review_util.py @@ -55,7 +55,7 @@ repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] previous_reviews_dict = odict() - for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): + for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ): previous_changeset_revision = str( repo.changectx( changeset ) ) if previous_changeset_revision in reviewed_revision_hashes: previous_rev, previous_changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, previous_changeset_revision ) @@ -89,7 +89,7 @@ """Determine if a repository has a changeset revision review prior to the received changeset revision.""" repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] - for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): + for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ): previous_changeset_revision = str( repo.changectx( changeset ) ) if previous_changeset_revision in reviewed_revision_hashes: return True
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -7,13 +7,13 @@ from galaxy import util from galaxy.util import asbool from galaxy.util import json -from galaxy.util import unicodify from galaxy.web import url_for from galaxy.web.form_builder import SelectField from galaxy.datatypes import checkers from galaxy.model.orm import and_ from galaxy.model.orm import or_ import sqlalchemy.orm.exc +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import encoding_util from tool_shed.util import hg_util @@ -22,18 +22,11 @@ from xml.etree import ElementTree as XmlET from urllib2 import HTTPError
-from galaxy import eggs - -eggs.require( 'markupsafe' ) -import markupsafe - log = logging.getLogger( __name__ )
CHUNK_SIZE = 2**20 # 1Mb -INITIAL_CHANGELOG_HASH = '000000000000' MAX_CONTENT_SIZE = 1048576 MAXDIFFSIZE = 8000 -MAX_DISPLAY_SIZE = 32768 DATATYPES_CONFIG_FILENAME = 'datatypes_conf.xml' REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = 'data_manager_conf.xml'
@@ -124,13 +117,6 @@ tool_dependencies_select_field.add_option( option_label, option_value ) return tool_dependencies_select_field
-def changeset_is_malicious( app, id, changeset_revision, **kwd ): - """Check the malicious flag in repository metadata for a specified change set""" - repository_metadata = get_repository_metadata_by_changeset_revision( app, id, changeset_revision ) - if repository_metadata: - return repository_metadata.malicious - return False - def check_or_update_tool_shed_status_for_installed_repository( trans, repository ): updated = False tool_shed_status_dict = get_tool_shed_status_for_installed_repository( trans.app, repository ) @@ -157,19 +143,6 @@ shutil.move( filename, os.path.abspath( config_filename ) ) os.chmod( config_filename, 0644 )
-def copy_file_from_manifest( repo, ctx, filename, dir ): - """Copy the latest version of the file named filename from the repository manifest to the directory to which dir refers.""" - for changeset in reversed_upper_bounded_changelog( repo, ctx ): - changeset_ctx = repo.changectx( changeset ) - fctx = get_file_context_from_ctx( changeset_ctx, filename ) - if fctx and fctx not in [ 'DELETED' ]: - file_path = os.path.join( dir, filename ) - fh = open( file_path, 'wb' ) - fh.write( fctx.data() ) - fh.close() - return file_path - return None - def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, status, current_changeset_revision=None, owner='', dist_to_shed=False ): """ @@ -337,7 +310,7 @@ for tool_dict in metadata[ 'tools' ]: guid = tool_dict[ 'guid' ] tool_config = tool_dict[ 'tool_config' ] - file_name = strip_path( tool_config ) + file_name = basic_util.strip_path( tool_config ) guids_and_configs[ guid ] = file_name # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict. tree, error_message = xml_util.parse_xml( shed_tool_conf ) @@ -389,7 +362,7 @@
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ): """Return the absolute path to a specified disk file contained in a repository.""" - stripped_file_name = strip_path( file_name ) + stripped_file_name = basic_util.strip_path( file_name ) file_path = None for root, dirs, files in os.walk( repo_files_dir ): if root.find( '.hg' ) < 0: @@ -416,25 +389,6 @@ except sqlalchemy.orm.exc.NoResultFound: return None
-def get_config( config_file, repo, ctx, dir ): - """Return the latest version of config_filename from the repository manifest.""" - config_file = strip_path( config_file ) - for changeset in reversed_upper_bounded_changelog( repo, ctx ): - changeset_ctx = repo.changectx( changeset ) - for ctx_file in changeset_ctx.files(): - ctx_file_name = strip_path( ctx_file ) - if ctx_file_name == config_file: - return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir ) - return None - -def get_config_from_disk( config_file, relative_install_dir ): - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == config_file: - return os.path.abspath( os.path.join( root, name ) ) - return None - def get_ctx_rev( app, tool_shed_url, name, owner, changeset_revision ): """ Send a request to the tool shed to retrieve the ctx_rev for a repository defined by the @@ -447,21 +401,6 @@ ctx_rev = common_util.tool_shed_get( app, tool_shed_url, url ) return ctx_rev
-def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ): - """ - Get the ctx file path for the latest revision of filename from the repository manifest up - to the value of changeset_revision. - """ - stripped_filename = strip_path( filename ) - for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): - manifest_changeset_revision = str( repo.changectx( changeset ) ) - manifest_ctx = repo.changectx( changeset ) - for ctx_file in manifest_ctx.files(): - ctx_file_name = strip_path( ctx_file ) - if ctx_file_name == stripped_filename: - return manifest_ctx, ctx_file - return None, None - def get_current_repository_metadata_for_changeset_revision( app, repository, changeset_revision ): encoded_repository_id = app.security.encode_id( repository.id ) repository_metadata = get_repository_metadata_by_changeset_revision( app, @@ -537,40 +476,6 @@ dependent_downloadable_revisions.append( downloadable_revision ) return dependent_downloadable_revisions
-def get_file_context_from_ctx( ctx, filename ): - """Return the mercurial file context for a specified file.""" - # We have to be careful in determining if we found the correct file because multiple files with - # the same name may be in different directories within ctx if the files were moved within the change - # set. For example, in the following ctx.files() list, the former may have been moved to the latter: - # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. - # Another scenario is that the file has been deleted. - deleted = False - filename = strip_path( filename ) - for ctx_file in ctx.files(): - ctx_file_name = strip_path( ctx_file ) - if filename == ctx_file_name: - try: - # If the file was moved, its destination will be returned here. - fctx = ctx[ ctx_file ] - return fctx - except LookupError, e: - # Set deleted for now, and continue looking in case the file was moved instead of deleted. - deleted = True - if deleted: - return 'DELETED' - return None - -def get_file_type_str( changeset_revision, file_type ): - if file_type == 'zip': - file_type_str = '%s.zip' % changeset_revision - elif file_type == 'bz2': - file_type_str = '%s.tar.bz2' % changeset_revision - elif file_type == 'gz': - file_type_str = '%s.tar.gz' % changeset_revision - else: - file_type_str = '' - return file_type_str - def get_ids_of_tool_shed_repositories_being_installed( trans, as_string=False ): installing_repository_ids = [] new_status = trans.install_model.ToolShedRepository.installation_status.NEW @@ -603,7 +508,7 @@ changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False ) if changeset_revisions: return changeset_revisions[ -1 ] - return INITIAL_CHANGELOG_HASH + return hg_util.INITIAL_CHANGELOG_HASH
def get_latest_downloadable_changeset_revision( app, repository, repo ): repository_tip = repository.tip( app ) @@ -613,30 +518,7 @@ changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ) if changeset_revisions: return changeset_revisions[ -1 ] - return INITIAL_CHANGELOG_HASH - -def get_named_tmpfile_from_ctx( ctx, filename, dir ): - """Return a named temporary file created from a specified file with a given name included in a repository changeset revision.""" - filename = strip_path( filename ) - for ctx_file in ctx.files(): - ctx_file_name = strip_path( ctx_file ) - if filename == ctx_file_name: - try: - # If the file was moved, its destination file contents will be returned here. - fctx = ctx[ ctx_file ] - except LookupError, e: - # Continue looking in case the file was moved. - fctx = None - continue - if fctx: - fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gntfc", dir=dir ) - tmp_filename = fh.name - fh.close() - fh = open( tmp_filename, 'wb' ) - fh.write( fctx.data() ) - fh.close() - return tmp_filename - return None + return hg_util.INITIAL_CHANGELOG_HASH
def get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision ): """ @@ -737,28 +619,6 @@ sorted_changeset_revisions = [ str( changeset_tup[ 1 ] ) for changeset_tup in sorted_changeset_tups ] return sorted_changeset_revisions
-def get_previous_metadata_changeset_revision( repository, repo, before_changeset_revision, downloadable=True ): - """ - Return the changeset_revision in the repository changelog that has associated metadata prior to the changeset to which - before_changeset_revision refers. If there isn't one, return the hash value of an empty repository changelog, INITIAL_CHANGELOG_HASH. - """ - changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=downloadable ) - if len( changeset_revisions ) == 1: - changeset_revision = changeset_revisions[ 0 ] - if changeset_revision == before_changeset_revision: - return INITIAL_CHANGELOG_HASH - return changeset_revision - previous_changeset_revision = None - for changeset_revision in changeset_revisions: - if changeset_revision == before_changeset_revision: - if previous_changeset_revision: - return previous_changeset_revision - else: - # Return the hash value of an empty repository changelog - note that this will not be a valid changeset revision. - return INITIAL_CHANGELOG_HASH - else: - previous_changeset_revision = changeset_revision - def get_prior_import_or_install_required_dict( trans, tsr_ids, repo_info_dicts ): """ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies @@ -974,7 +834,7 @@ else: safe_str = '' for i, line in enumerate( open( file_path ) ): - safe_str = '%s%s' % ( safe_str, to_html_string( line ) ) + safe_str = '%s%s' % ( safe_str, basic_util.to_html_string( line ) ) # Stop reading after string is larger than MAX_CONTENT_SIZE. if len( safe_str ) > MAX_CONTENT_SIZE: large_str = \ @@ -982,13 +842,17 @@ util.nice_size( MAX_CONTENT_SIZE ) safe_str = '%s%s' % ( safe_str, large_str ) break - if len( safe_str ) > MAX_DISPLAY_SIZE: - # Eliminate the middle of the file to display a file no larger than MAX_DISPLAY_SIZE. This may not be ideal if the file is larger - # than MAX_CONTENT_SIZE. 
+ if len( safe_str ) > basic_util.MAX_DISPLAY_SIZE: + # Eliminate the middle of the file to display a file no larger than basic_util.MAX_DISPLAY_SIZE. + # This may not be ideal if the file is larger than MAX_CONTENT_SIZE. join_by_str = \ "<br/><br/>...some text eliminated here because file size is larger than maximum viewing size of %s...<br/><br/>" % \ - util.nice_size( MAX_DISPLAY_SIZE ) - safe_str = util.shrink_string_by_size( safe_str, MAX_DISPLAY_SIZE, join_by=join_by_str, left_larger=True, beginning_on_size_error=True ) + util.nice_size( basic_util.MAX_DISPLAY_SIZE ) + safe_str = util.shrink_string_by_size( safe_str, + basic_util.MAX_DISPLAY_SIZE, + join_by=join_by_str, + left_larger=True, + beginning_on_size_error=True ) return safe_str
def get_repository_files( trans, folder_path ): @@ -1023,11 +887,6 @@ # This should never be reached - raise an exception? return v, None
-def get_repository_heads( repo ): - """Return current repository heads, which are changesets with no child changesets.""" - heads = [ repo[ h ] for h in repo.heads( None ) ] - return heads - def get_repository_ids_requiring_prior_import_or_install( trans, tsr_ids, repository_dependencies ): """ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies @@ -1131,20 +990,13 @@ repository_tools_tups.append( ( relative_path, guid, tool ) ) return repository_tools_tups
-def get_reversed_changelog_changesets( repo ): - """Return a list of changesets in reverse order from that provided by the repository manifest.""" - reversed_changelog = [] - for changeset in repo.changelog: - reversed_changelog.insert( 0, changeset ) - return reversed_changelog - def get_shed_tool_conf_dict( app, shed_tool_conf ): """Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry in the shed_tool_conf_dict associated with the file.""" for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ): if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]: return index, shed_tool_conf_dict else: - file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] ) + file_name = basic_util.strip_path( shed_tool_conf_dict[ 'config_filename' ] ) if shed_tool_conf == file_name: return index, shed_tool_conf_dict
@@ -1188,7 +1040,7 @@ if config_filename == shed_tool_conf: return shed_tool_conf_dict[ 'tool_path' ] else: - file_name = strip_path( config_filename ) + file_name = basic_util.strip_path( config_filename ) if file_name == shed_tool_conf: return shed_tool_conf_dict[ 'tool_path' ] return None @@ -1331,9 +1183,10 @@ repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) # Get the upper bound changeset revision. upper_bound_changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) - # Build the list of changeset revision hashes defining each available update up to, but excluding, upper_bound_changeset_revision. + # Build the list of changeset revision hashes defining each available update up to, but excluding + # upper_bound_changeset_revision. changeset_hashes = [] - for changeset in reversed_lower_upper_bounded_changelog( repo, changeset_revision, upper_bound_changeset_revision ): + for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo, changeset_revision, upper_bound_changeset_revision ): # Make sure to exclude upper_bound_changeset_revision. if changeset != upper_bound_changeset_revision: changeset_hashes.append( str( repo.changectx( changeset ) ) ) @@ -1477,7 +1330,10 @@ return False
def open_repository_files_folder( trans, folder_path ): - """Return a list of dictionaries, each of which contains information for a file or directory contained within a directory in a repository file hierarchy.""" + """ + Return a list of dictionaries, each of which contains information for a file or directory contained + within a directory in a repository file hierarchy. + """ try: files_list = get_repository_files( trans, folder_path ) except OSError, e: @@ -1499,28 +1355,6 @@ folder_contents.append( node ) return folder_contents
-def pretty_print( dict=None ): - if dict is not None: - return json.to_json_string( dict, sort_keys=True, indent=4 ) - -def remove_dir( dir ): - """Attempt to remove a directory from disk.""" - if dir: - if os.path.exists( dir ): - try: - shutil.rmtree( dir ) - except: - pass - -def remove_file( file_name ): - """Attempt to remove a file from disk.""" - if file_name: - if os.path.exists( file_name ): - try: - os.remove( file_name ) - except: - pass - def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple ): """ Find out if a repository is already installed into Galaxy - there are several scenarios where this @@ -1584,34 +1418,6 @@ trans.install_model.context.add( repository ) trans.install_model.context.flush()
-def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ): - """ - Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and - including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of - INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision. - """ - # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision - # is a downloadable changeset_revision. - # excluded_lower_bounds_changeset_revision = get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=? ) - if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH: - appending_started = True - else: - appending_started = False - reversed_changelog = [] - for changeset in repo.changelog: - changeset_hash = str( repo.changectx( changeset ) ) - if appending_started: - reversed_changelog.insert( 0, changeset ) - if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started: - appending_started = True - if changeset_hash == included_upper_bounds_changeset_revision: - break - return reversed_changelog - -def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ): - """Return a reversed list of changesets in the repository changelog up to and including the included_upper_bounds_changeset_revision.""" - return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision ) - def set_image_paths( app, encoded_repository_id, text ): """ Handle tool help image display for tools that are contained in repositories in the tool shed or installed into Galaxy as well as image @@ -1666,41 +1472,6 @@ return str( required_rd_tup[ 4 ] ) return 'False'
-def size_string( raw_text, size=MAX_DISPLAY_SIZE ): - """Return a subset of a string (up to MAX_DISPLAY_SIZE) translated to a safe string for display in a browser.""" - if raw_text and len( raw_text ) >= size: - large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( size ) - raw_text = '%s%s' % ( raw_text[ 0:size ], large_str ) - return raw_text or '' - -def stringify( list ): - if list: - return ','.join( list ) - return '' - -def strip_path( fpath ): - """Attempt to strip the path from a file name.""" - if not fpath: - return fpath - try: - file_path, file_name = os.path.split( fpath ) - except: - file_name = fpath - return file_name - -def to_html_string( text ): - """Translates the characters in text to an html string""" - if text: - try: - text = unicodify( text ) - except UnicodeDecodeError, e: - return "Error decoding string: %s" % str( e ) - text = unicode( markupsafe.escape( text ) ) - text = text.replace( '\n', '<br/>' ) - text = text.replace( ' ', ' ' ) - text = text.replace( ' ', ' ' ) - return text - def tool_shed_from_repository_clone_url( repository_clone_url ): """Given a repository clone URL, return the tool shed that contains the repository.""" return common_util.remove_protocol_and_user_from_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/tool_dependency_util.py --- a/lib/tool_shed/util/tool_dependency_util.py +++ b/lib/tool_shed/util/tool_dependency_util.py @@ -7,6 +7,8 @@ from galaxy.model.orm import or_ import tool_shed.util.shed_util_common as suc import tool_shed.repository_types.util as rt_util +from tool_shed.util import basic_util +from tool_shed.util import hg_util from tool_shed.util import xml_util from tool_shed.galaxy_install.tool_dependencies import td_common_util
@@ -85,7 +87,7 @@ if shed_config_dict.get( 'tool_path' ): relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) # Get the tool_dependencies.xml file from the repository. - tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) + tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) tree, error_message = xml_util.parse_xml( tool_dependencies_config ) if tree is None: return tool_dependency_objects @@ -629,7 +631,7 @@ error_message += ' prepared for re-installation.' print error_message tool_dependency.status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED - suc.remove_dir( tool_dependency_install_dir ) + basic_util.remove_dir( tool_dependency_install_dir ) can_install_tool_dependency = True sa_session.add( tool_dependency ) sa_session.flush()
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb lib/tool_shed/util/tool_util.py --- a/lib/tool_shed/util/tool_util.py +++ b/lib/tool_shed/util/tool_util.py @@ -13,6 +13,7 @@ from galaxy.util.expressions import ExpressionContext from galaxy.web.form_builder import SelectField from galaxy.tools.actions.upload import UploadToolAction +from tool_shed.util import basic_util from tool_shed.util import common_util from tool_shed.util import hg_util from tool_shed.util import xml_util @@ -119,7 +120,7 @@ return False if changeset_revision == repository.tip( trans.app ): return True - file_name = suc.strip_path( file_path ) + file_name = basic_util.strip_path( file_path ) latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision ) can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file ) try: @@ -142,7 +143,7 @@ if options and isinstance( options, dynamic_options.DynamicOptions ): if options.tool_data_table or options.missing_tool_data_table_name: # Make sure the repository contains a tool_data_table_conf.xml.sample file. - sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir ) + sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir ) if sample_tool_data_table_conf: error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf ) if error: @@ -158,10 +159,10 @@ if options.index_file or options.missing_index_file: # Make sure the repository contains the required xxx.loc.sample file. 
index_file = options.index_file or options.missing_index_file - index_file_name = suc.strip_path( index_file ) + index_file_name = basic_util.strip_path( index_file ) sample_found = False for sample_file in sample_files: - sample_file_name = suc.strip_path( sample_file ) + sample_file_name = basic_util.strip_path( sample_file ) if sample_file_name == '%s.sample' % index_file_name: options.index_file = index_file_name options.missing_index_file = None @@ -206,7 +207,7 @@ """ if dest_path is None: dest_path = os.path.abspath( app.config.tool_data_path ) - sample_file_name = suc.strip_path( filename ) + sample_file_name = basic_util.strip_path( filename ) copied_file = sample_file_name.replace( '.sample', '' ) full_source_path = os.path.abspath( filename ) full_destination_path = os.path.join( dest_path, sample_file_name ) @@ -312,7 +313,7 @@ {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]} """ tool_panel_dict = {} - file_name = suc.strip_path( tool_config ) + file_name = basic_util.strip_path( tool_config ) tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections ) tool_panel_dict[ guid ] = tool_section_dicts return tool_panel_dict @@ -412,11 +413,11 @@ This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files, multiple files will have the same name in various directories within the repository. 
""" - stripped_filename = suc.strip_path( filename ) - for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): + stripped_filename = basic_util.strip_path( filename ) + for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ): manifest_ctx = repo.changectx( changeset ) for ctx_file in manifest_ctx.files(): - ctx_file_name = suc.strip_path( ctx_file ) + ctx_file_name = basic_util.strip_path( ctx_file ) if ctx_file_name == stripped_filename: try: fctx = manifest_ctx[ ctx_file ] @@ -442,14 +443,14 @@ """ deleted_sample_files = [] sample_files = [] - for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ): + for changeset in hg_util.reversed_upper_bounded_changelog( repo, ctx ): changeset_ctx = repo.changectx( changeset ) for ctx_file in changeset_ctx.files(): - ctx_file_name = suc.strip_path( ctx_file ) + ctx_file_name = basic_util.strip_path( ctx_file ) # If we decide in the future that files deleted later in the changelog should not be used, we can use the following if statement. # if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files and ctx_file_name not in deleted_sample_files: if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files: - fctx = suc.get_file_context_from_ctx( changeset_ctx, ctx_file ) + fctx = hg_util.get_file_context_from_ctx( changeset_ctx, ctx_file ) if fctx in [ 'DELETED' ]: # Since the possibly future used if statement above is commented out, the same file that was initially added will be # discovered in an earlier changeset in the change log and fall through to the else block below. In other words, if @@ -536,7 +537,7 @@ version_lineage = [ guid ] # Get all ancestor guids of the received guid. 
current_child_guid = guid - for changeset in suc.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ): + for changeset in hg_util.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ): ctx = repo.changectx( changeset ) rm = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, str( ctx ) ) if rm: @@ -546,9 +547,9 @@ current_child_guid = parent_guid # Get all descendant guids of the received guid. current_parent_guid = guid - for changeset in suc.reversed_lower_upper_bounded_changelog( repo, - repository_metadata.changeset_revision, - repository.tip( trans.app ) ): + for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo, + repository_metadata.changeset_revision, + repository.tip( trans.app ) ): ctx = repo.changectx( changeset ) rm = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, str( ctx ) ) if rm: @@ -574,7 +575,7 @@ break if missing_data_table_entry: # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository. - sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir ) + sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir ) if sample_tool_data_table_conf: # Add entries to the ToolDataTableManager's in-memory data_tables dictionary. 
error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True ) @@ -598,11 +599,11 @@ params_with_missing_index_file = repository_tool.params_with_missing_index_file for param in params_with_missing_index_file: options = param.options - missing_file_name = suc.strip_path( options.missing_index_file ) + missing_file_name = basic_util.strip_path( options.missing_index_file ) if missing_file_name not in sample_files_copied: # The repository must contain the required xxx.loc.sample file. for sample_file in sample_files: - sample_file_name = suc.strip_path( sample_file ) + sample_file_name = basic_util.strip_path( sample_file ) if sample_file_name == '%s.sample' % missing_file_name: copy_sample_file( app, sample_file ) if options.tool_data_table and options.tool_data_table.missing_index_file: @@ -643,7 +644,7 @@ error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config ) if error: log.debug( message ) - manifest_ctx, ctx_file = suc.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision ) + manifest_ctx, ctx_file = hg_util.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision ) if manifest_ctx and ctx_file: tool, message2 = load_tool_from_tmp_config( trans, repo, repository_id, manifest_ctx, ctx_file, work_dir ) message = concat_messages( message, message2 ) @@ -885,7 +886,7 @@ message = concat_messages( message, message2 ) else: tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, repository_id, changeset_revision, tool_config_filename, work_dir ) - suc.remove_dir( work_dir ) + basic_util.remove_dir( work_dir ) trans.app.config.tool_data_path = original_tool_data_path # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. 
reset_tool_data_tables( trans.app ) @@ -911,7 +912,7 @@ def load_tool_from_tmp_config( trans, repo, repository_id, ctx, ctx_file, work_dir ): tool = None message = '' - tmp_tool_config = suc.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir ) + tmp_tool_config = hg_util.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir ) if tmp_tool_config: element_tree, error_message = xml_util.parse_xml( tmp_tool_config ) if element_tree is None: @@ -921,7 +922,7 @@ tmp_code_files = [] for code_elem in element_tree_root.findall( 'code' ): code_file_name = code_elem.get( 'file' ) - tmp_code_file_name = suc.copy_file_from_manifest( repo, ctx, code_file_name, work_dir ) + tmp_code_file_name = hg_util.copy_file_from_manifest( repo, ctx, code_file_name, work_dir ) if tmp_code_file_name: tmp_code_files.append( tmp_code_file_name ) tool, valid, message = load_tool_from_config( trans.app, repository_id, tmp_tool_config )
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb templates/admin/review_tool_migration_stages.mako --- a/templates/admin/review_tool_migration_stages.mako +++ b/templates/admin/review_tool_migration_stages.mako @@ -35,7 +35,7 @@ </p></div><table class="grid"> - <% from tool_shed.util.shed_util_common import to_html_string %> + <% from tool_shed.util.basic_util import to_html_string %> %for stage in migration_stages_dict.keys(): <% migration_command = 'sh ./scripts/migrate_tools/%04d_tools.sh' % stage
diff -r f6aa2d17d38430e92beb9da80f53c9989dd504ee -r dbc3d5c3506ed833dc453faa4f00797d95b8a8bb templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako --- a/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako +++ b/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako @@ -31,7 +31,7 @@ %for tool_dependency in repository.tool_dependencies: <% if tool_dependency.error_message: - from tool_shed.util.shed_util_common import to_html_string + from tool_shed.util.basic_util import to_html_string error_message = to_html_string( tool_dependency.error_message ) else: error_message = ''
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.