commit/galaxy-central: greg: Tweak some imports.
1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/70f88a048ed2/
changeset:   70f88a048ed2
user:        greg
date:        2012-12-12 23:36:40
summary:     Tweak some imports.
affected #:  10 files

diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -6,6 +6,7 @@ from galaxy.tools import ToolSection from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * +import galaxy.util.shed_util_common as suc from galaxy.util.odict import odict from galaxy.tool_shed.common_util import * @@ -87,7 +88,7 @@ break full_path = str( os.path.abspath( os.path.join( root, name ) ) ) tool = self.toolbox.load_tool( full_path ) - return generate_tool_guid( repository_clone_url, tool ) + return suc.generate_tool_guid( repository_clone_url, tool ) def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ): # Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are # either ToolSection elements or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml @@ -177,15 +178,15 @@ else: print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \ % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) ) - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app, - repository=tool_shed_repository, - repository_clone_url=repository_clone_url, - shed_config_dict = self.shed_config_dict, - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=True ) + metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app, + repository=tool_shed_repository, + repository_clone_url=repository_clone_url, + shed_config_dict = self.shed_config_dict, + relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=True ) tool_shed_repository.metadata = metadata_dict self.app.sa_session.add( tool_shed_repository ) self.app.sa_session.flush() @@ -217,7 +218,7 @@ tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from disk. - tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) + tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) installed_tool_dependencies = handle_tool_dependencies( app=self.app, tool_shed_repository=tool_shed_repository, tool_dependencies_config=tool_dependencies_config, @@ -242,7 +243,7 @@ self.app.sa_session.add( tool_shed_repository ) self.app.sa_session.flush() work_dir = tempfile.mkdtemp() - datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir ) + datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir ) # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started # after this installation completes.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir @@ -294,7 +295,7 @@ owner=self.repository_owner, dist_to_shed=True ) update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING ) - cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) + cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) if cloned_ok: self.handle_repository_contents( tool_shed_repository=tool_shed_repository, repository_clone_url=repository_clone_url, diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/update_manager.py --- a/lib/galaxy/tool_shed/update_manager.py +++ b/lib/galaxy/tool_shed/update_manager.py @@ -33,7 +33,7 @@ self.sleeper.sleep( self.seconds_to_sleep ) log.info( 'Transfer job restarter shutting down...' ) def check_for_update( self, repository ): - tool_shed_url = get_url_from_repository_tool_shed( self.app, repository ) + tool_shed_url = suc.get_url_from_repository_tool_shed( self.app, repository ) url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&from_update_manager=True' % \ ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision ) try: diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -1,10 +1,10 @@ import os, tempfile, shutil, logging, urllib2 from galaxy import util -from shed_util_common import * +import shed_util_common as suc from galaxy.tools.search import ToolBoxSearch from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment -from galaxy.tool_shed.encoding_util import * -from galaxy.model.orm import * +from galaxy.tool_shed import encoding_util +from galaxy.model.orm import and_ from galaxy import eggs import pkg_resources @@ -185,7 +185,7 @@ filename=os.path.join( tool_path, filename ) # Attempt to ensure we're copying an appropriate file. if is_data_index_sample_file( filename ): - copy_sample_file( app, filename, dest_path=dest_path ) + suc.copy_sample_file( app, filename, dest_path=dest_path ) def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ): return dict( tool_shed=tool_shed, repository_name=name, @@ -204,7 +204,7 @@ # to it being uninstalled. current_changeset_revision = installed_changeset_revision sa_session = app.model.context.current - tool_shed = get_tool_shed_from_clone_url( repository_clone_url ) + tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) if not owner: owner = get_repository_owner_from_clone_url( repository_clone_url ) includes_datatypes = 'datatypes' in metadata_dict @@ -255,7 +255,7 @@ if shed_config_dict.get( 'tool_path' ): relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) # Get the tool_dependencies.xml file from the repository. 
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) + tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) try: tree = ElementTree.parse( tool_dependencies_config ) except Exception, e: @@ -295,8 +295,8 @@ return tool_dependency_objects def generate_clone_url_for_installed_repository( trans, repository ): """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - return url_join( tool_shed_url, 'repos', repository.owner, repository.name ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name ) def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ): if tool_section is not None: tool_elem = SubElement( tool_section, 'tool' ) @@ -321,7 +321,7 @@ """Generate a list of ElementTree Element objects for each section or tool.""" elem_list = [] tool_elem = None - cleaned_repository_clone_url = clean_repository_clone_url( repository_clone_url ) + cleaned_repository_clone_url = suc.clean_repository_clone_url( repository_clone_url ) if not owner: owner = get_repository_owner( cleaned_repository_clone_url ) tool_shed = cleaned_repository_clone_url.split( 'repos' )[ 0 ].rstrip( '/' ) @@ -475,12 +475,12 @@ def get_config( config_file, repo, ctx, dir ): """Return the latest version of config_filename from the repository manifest.""" config_file = strip_path( config_file ) - for changeset in reversed_upper_bounded_changelog( repo, ctx ): + for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ): changeset_ctx = repo.changectx( changeset ) for ctx_file in changeset_ctx.files(): ctx_file_name = strip_path( ctx_file ) if ctx_file_name == config_file: - return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir ) + return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir ) return None def get_converter_and_display_paths( registration_elem, relative_install_dir ): """Find the relative path to data type converters and display applications included in installed tool shed repositories.""" @@ -525,7 +525,7 @@ break return converter_path, display_path def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ): - url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) + url = suc.url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) response = urllib2.urlopen( url ) ctx_rev = response.read() response.close() @@ -552,7 +552,7 @@ repo_path = repo_path.replace( '/', '', 1 ) return repo_path.lstrip( '/' ).split( '/' )[ 0 ] def get_repository_owner_from_clone_url( repository_clone_url ): - tmp_url = clean_repository_clone_url( repository_clone_url ) + tmp_url = suc.clean_repository_clone_url( repository_clone_url ) tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) return get_repository_owner( tmp_url ) def get_repository_tools_tups( app, metadata_dict ): @@ -674,14 +674,14 @@ .first() def get_update_to_changeset_revision_and_ctx_rev( trans, repository ): """Return the changeset revision hash to which the repository can be updated.""" - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - url = url_join( tool_shed_url, 
'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \ + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \ ( repository.name, repository.owner, repository.installed_changeset_revision ) ) try: response = urllib2.urlopen( url ) encoded_update_dict = response.read() if encoded_update_dict: - update_dict = tool_shed_decode( encoded_update_dict ) + update_dict = encoding_util.tool_shed_decode( encoded_update_dict ) changeset_revision = update_dict[ 'changeset_revision' ] ctx_rev = update_dict[ 'ctx_rev' ] response.close() @@ -704,11 +704,11 @@ break if missing_data_table_entry: # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository. - sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir ) + sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir ) if sample_tool_data_table_conf: # Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of # data_table_elem_names. - error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True ) + error, message = suc.handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True ) if error: # TODO: Do more here than logging an exception. log.debug( message ) @@ -716,7 +716,7 @@ repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid ) repository_tools_tups[ index ] = ( tup_path, guid, repository_tool ) # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( app ) + suc.reset_tool_data_tables( app ) return repository_tools_tups def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ): """ @@ -734,7 +734,7 @@ for sample_file in sample_files: sample_file_name = strip_path( sample_file ) if sample_file_name == '%s.sample' % missing_file_name: - copy_sample_file( app, sample_file ) + suc.copy_sample_file( app, sample_file ) if options.tool_data_table and options.tool_data_table.missing_index_file: options.tool_data_table.handle_found_index_file( options.missing_index_file ) sample_files_copied.append( options.missing_index_file ) @@ -852,7 +852,7 @@ # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later. 
metadata = repository.metadata repository_dict = None - datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir ) + datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', relative_install_dir ) if datatypes_config: converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate ) if converter_path or display_path: @@ -883,10 +883,7 @@ return False def pull_repository( repo, repository_clone_url, ctx_rev ): """Pull changes from a remote repository to a local one.""" - commands.pull( get_configured_ui(), - repo, - source=repository_clone_url, - rev=[ ctx_rev ] ) + commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] ) def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ): # A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list # of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have @@ -1038,7 +1035,7 @@ trans.app.toolbox.write_integrated_tool_panel_config_file() def remove_tool_dependency( trans, tool_dependency ): dependency_install_dir = tool_dependency.installation_directory( trans.app ) - removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir ) + removed, error_message = suc.remove_tool_dependency_installation_directory( dependency_install_dir ) if removed: tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED tool_dependency.error_message = None @@ -1046,7 +1043,7 @@ trans.sa_session.flush() return removed, error_message def tool_shed_from_repository_clone_url( repository_clone_url ): - return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' ) + return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' ) def update_in_shed_tool_config( app, repository ): # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list # of config_elems instead of using the in-memory list. 
@@ -1060,7 +1057,7 @@ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ) repository_tools_tups = get_repository_tools_tups( app, repository.metadata ) - cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) ) + cleaned_repository_clone_url = suc.clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) ) tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url ) owner = repository.owner if not owner: diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/admin.py --- a/lib/galaxy/webapps/community/controllers/admin.py +++ b/lib/galaxy/webapps/community/controllers/admin.py @@ -5,7 +5,7 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.web.form_builder import SelectField from galaxy.util import inflector -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from common import * from repository import RepositoryGrid, CategoryGrid @@ -342,8 +342,8 @@ class RevisionColumn( grids.TextColumn ): def get_value( self, trans, grid, repository_metadata ): repository = repository_metadata.repository - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) - ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) + ctx = suc.get_changectx_for_changeset( repo, repository_metadata.changeset_revision ) return "%s:%s" % ( str( ctx.rev() ), repository_metadata.changeset_revision ) class ToolsColumn( grids.TextColumn ): def get_value( self, trans, grid, repository_metadata ): @@ -481,7 +481,7 @@ # The received id is the repository id, so we need to get the id of the user # that uploaded the repository. repository_id = kwd.get( 'id', None ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'f-email' ] = repository.user.email elif operation == "repositories_by_category": # Eliminate the current filters if any exist. 
@@ -513,7 +513,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if repository.tip( trans.app ) != v: return trans.response.send_redirect( web.url_for( controller='repository', action='browse_repositories', @@ -586,7 +586,7 @@ count = 0 deleted_repositories = "" for repository_id in ids: - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if not repository.deleted: repository.deleted = True trans.sa_session.add( repository ) @@ -717,12 +717,12 @@ @web.require_admin def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ): if 'reset_metadata_on_selected_repositories_button' in kwd: - kwd[ 'CONTROLLER' ] = TOOL_SHED_ADMIN_CONTROLLER - message, status = reset_metadata_on_selected_repositories( trans, **kwd ) + kwd[ 'CONTROLLER' ] = suc.TOOL_SHED_ADMIN_CONTROLLER + message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd ) else: message = util.restore_text( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) - repositories_select_field = build_repository_ids_select_field( trans, TOOL_SHED_ADMIN_CONTROLLER ) + repositories_select_field = suc.build_repository_ids_select_field( trans, suc.TOOL_SHED_ADMIN_CONTROLLER ) return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako', repositories_select_field=repositories_select_field, message=message, @@ -740,7 +740,7 @@ count = 0 undeleted_repositories = "" for repository_id in ids: - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if repository.deleted: repository.deleted = False trans.sa_session.add( repository ) diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -6,7 +6,7 @@ from galaxy.util.odict import odict from galaxy.util.json import from_json_string, to_json_string from galaxy.util.hash_util import * -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from galaxy.web.base.controller import * from galaxy.web.base.controllers.admin import * from galaxy.webapps.community import model @@ -108,12 +108,12 @@ tool_versions_dict = {} for tool_dict in metadata.get( 'tools', [] ): # We have at least 2 changeset revisions to compare tool guids and tool ids. 
- parent_id = get_parent_id( trans, - id, - tool_dict[ 'id' ], - tool_dict[ 'version' ], - tool_dict[ 'guid' ], - changeset_revisions ) + parent_id = suc.get_parent_id( trans, + id, + tool_dict[ 'id' ], + tool_dict[ 'version' ], + tool_dict[ 'guid' ], + changeset_revisions ) tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id if tool_versions_dict: repository_metadata.tool_versions = tool_versions_dict @@ -130,7 +130,7 @@ return False if changeset_revision == repository.tip( trans.app ): return True - file_name = strip_path( file_path ) + file_name = suc.strip_path( file_path ) latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision ) can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file ) try: @@ -140,7 +140,7 @@ return can_use_disk_file def changeset_is_malicious( trans, id, changeset_revision, **kwd ): """Check the malicious flag in repository metadata for a specified change set""" - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: return repository_metadata.malicious return False @@ -191,7 +191,7 @@ tool.id, tool.version ) def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ): - stripped_file_name = strip_path( file_name ) + stripped_file_name = suc.strip_path( file_name ) file_path = None for root, dirs, files in os.walk( repo_files_dir ): if root.find( '.hg' ) < 0: @@ -246,11 +246,11 @@ This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files, multiple files will have the same name in various directories within the repository. 
""" - stripped_filename = strip_path( filename ) - for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + stripped_filename = suc.strip_path( filename ) + for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): manifest_ctx = repo.changectx( changeset ) for ctx_file in manifest_ctx.files(): - ctx_file_name = strip_path( ctx_file ) + ctx_file_name = suc.strip_path( ctx_file ) if ctx_file_name == stripped_filename: try: fctx = manifest_ctx[ ctx_file ] @@ -268,10 +268,10 @@ return None def get_previous_repository_reviews( trans, repository, changeset_revision ): """Return an ordered dictionary of repository reviews up to and including the received changeset revision.""" - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] previous_reviews_dict = odict() - for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): previous_changeset_revision = str( repo.changectx( changeset ) ) if previous_changeset_revision in reviewed_revision_hashes: previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision ) @@ -313,9 +313,9 @@ def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ): if repository is None: repository = repository_metadata.repository - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) changeset_revision = repository_metadata.changeset_revision - ctx = get_changectx_for_changeset( repo, changeset_revision ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) if ctx: rev = '%04d' % ctx.rev() label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) @@ -324,7 +324,7 @@ label = "-1:%s" % changeset_revision return rev, label, changeset_revision def get_rev_label_from_changeset_revision( repo, changeset_revision ): - ctx = get_changectx_for_changeset( repo, changeset_revision ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) if ctx: rev = '%04d' % ctx.rev() label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) @@ -358,8 +358,8 @@ Return a string consisting of the human read-able changeset rev and the changeset revision string. """ - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) - ctx = get_changectx_for_changeset( repo, changeset_revision ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) if ctx: return "%s:%s" % ( str( ctx.rev() ), changeset_revision ) else: @@ -389,7 +389,7 @@ # user is not an admin user, the email will not include any information about both HTML and image content # that was included in the change set. repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) smtp_server = trans.app.config.smtp_server if smtp_server and ( new_repo_alert or repository.email_alerts ): # Send email alert to users that want them. @@ -457,9 +457,9 @@ log.exception( "An error occurred sending a tool shed repository update alert by email." 
) def has_previous_repository_reviews( trans, repository, changeset_revision ): """Determine if a repository has a changeset revision review prior to the received changeset revision.""" - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] - for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): previous_changeset_revision = str( repo.changectx( changeset ) ) if previous_changeset_revision in reviewed_revision_hashes: return True @@ -471,9 +471,9 @@ revision and the first changeset revision in the repository, searching backwards. """ original_tool_data_path = trans.app.config.tool_data_path - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_files_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_files_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_files_dir ) message = '' tool = None can_use_disk_file = False @@ -482,27 +482,27 @@ can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision ) if can_use_disk_file: trans.app.config.tool_data_path = work_dir - tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ) + tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ) if tool is not None: - invalid_files_and_errors_tups = check_tool_input_params( trans.app, - repo_files_dir, - tool_config_filename, - tool, - sample_files ) + invalid_files_and_errors_tups = suc.check_tool_input_params( trans.app, + repo_files_dir, + tool_config_filename, + tool, + sample_files ) if invalid_files_and_errors_tups: - message2 = generate_message_for_invalid_tools( trans, - invalid_files_and_errors_tups, - repository, - metadata_dict=None, - as_html=True, - displaying_invalid_tool=True ) - message = concat_messages( message, message2 ) + message2 = suc.generate_message_for_invalid_tools( trans, + invalid_files_and_errors_tups, + repository, + metadata_dict=None, + as_html=True, + displaying_invalid_tool=True ) + message = suc.concat_messages( message, message2 ) else: - tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ) - remove_dir( work_dir ) + tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ) + suc.remove_dir( work_dir ) trans.app.config.tool_data_path = original_tool_data_path # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. 
- reset_tool_data_tables( trans.app ) + suc.reset_tool_data_tables( trans.app ) return repository, tool, message def new_repository_dependency_metadata_required( trans, repository, metadata_dict ): """ @@ -594,36 +594,36 @@ message = '' status = 'done' encoded_id = trans.security.encode_id( repository.id ) - repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - relative_install_dir=repo_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) + metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + relative_install_dir=repo_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False ) if metadata_dict: - downloadable = is_downloadable( metadata_dict ) + downloadable = suc.is_downloadable( metadata_dict ) repository_metadata = None if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \ new_tool_metadata_required( trans, repository, metadata_dict ) or \ new_workflow_metadata_required( trans, repository, metadata_dict ): # Create a new repository_metadata table row. - repository_metadata = create_or_update_repository_metadata( trans, - encoded_id, - repository, - repository.tip( trans.app ), - metadata_dict ) + repository_metadata = suc.create_or_update_repository_metadata( trans, + encoded_id, + repository, + repository.tip( trans.app ), + metadata_dict ) # If this is the first record stored for this repository, see if we need to send any email alerts. if len( repository.downloadable_revisions ) == 1: handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) else: repository_metadata = get_latest_repository_metadata( trans, repository.id ) if repository_metadata: - downloadable = is_downloadable( metadata_dict ) + downloadable = suc.is_downloadable( metadata_dict ) # Update the last saved repository_metadata table row. repository_metadata.changeset_revision = repository.tip( trans.app ) repository_metadata.metadata = metadata_dict @@ -632,17 +632,17 @@ trans.sa_session.flush() else: # There are no tools in the repository, and we're setting metadata on the repository tip. - repository_metadata = create_or_update_repository_metadata( trans, - encoded_id, - repository, - repository.tip( trans.app ), - metadata_dict ) + repository_metadata = suc.create_or_update_repository_metadata( trans, + encoded_id, + repository, + repository.tip( trans.app ), + metadata_dict ) if 'tools' in metadata_dict and repository_metadata and status != 'error': # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog. 
changeset_revisions = [] for changeset in repo.changelog: changeset_revision = str( repo.changectx( changeset ) ) - if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ): + if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ): changeset_revisions.append( changeset_revision ) add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) elif len( repo ) == 1 and not invalid_file_tups: @@ -650,10 +650,10 @@ message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." status = "error" if invalid_file_tups: - message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) + suc.reset_tool_data_tables( trans.app ) return message, status def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ): # Set metadata on the repository tip. @@ -671,7 +671,7 @@ # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all # added, modified or removed files that have not yet been committed. repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) # The following will delete the disk copy of only the files in the repository. #os.system( 'hg update -r null > /dev/null 2>&1' ) files_to_remove_from_disk = [] diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -9,7 +9,7 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.util.json import from_json_string, to_json_string from galaxy.model.orm import * -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from galaxy.tool_shed.encoding_util import * from common import * @@ -616,7 +616,7 @@ else: # The received id is the repository id, so we need to get the id of the user that uploaded the repository. repository_id = kwd.get( 'id', None ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'f-email' ] = repository.user.email elif operation == "repositories_i_own": # Eliminate the current filters if any exist. 
@@ -673,7 +673,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if repository.tip( trans.app ) != v: return trans.response.send_redirect( web.url_for( controller='repository', action='browse_repositories', @@ -687,10 +687,10 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) ) - repository = get_repository_in_tool_shed( trans, id ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repository = suc.get_repository_in_tool_shed( trans, id ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) # Update repository files for browsing. - update_repository( repo ) + suc.update_repository( repo ) is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, repository.tip( trans.app ) ) return trans.fill_template( '/webapps/community/repository/browse_repository.mako', @@ -748,7 +748,7 @@ operation = kwd[ 'operation' ].lower() if operation == "preview_tools_in_changeset": repository_id = kwd.get( 'id', None ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repository_metadata = get_latest_repository_metadata( trans, repository.id ) latest_installable_changeset_revision = repository_metadata.changeset_revision return trans.response.send_redirect( web.url_for( controller='repository', @@ -772,7 +772,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if repository.tip( trans.app ) != v: return trans.response.send_redirect( web.url_for( controller='repository', action='preview_tools_in_changeset', @@ -817,11 +817,11 @@ name = params.get( 'name', None ) owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) # Default to the current changeset revision. - update_to_ctx = get_changectx_for_changeset( repo, changeset_revision ) + update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) latest_changeset_revision = changeset_revision from_update_manager = kwd.get( 'from_update_manager', False ) if from_update_manager: @@ -829,9 +829,9 @@ no_update = 'false' else: # Start building up the url to redirect back to the calling Galaxy instance. 
- url = url_join( galaxy_url, - 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \ - ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) ) + url = suc.url_join( galaxy_url, + 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \ + ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) ) if changeset_revision == repository.tip( trans.app ): # If changeset_revision is the repository tip, there are no additional updates. if from_update_manager: @@ -839,9 +839,9 @@ # Return the same value for changeset_revision and latest_changeset_revision. url += latest_changeset_revision else: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, - trans.security.encode_id( repository.id ), - changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + trans.security.encode_id( repository.id ), + changeset_revision ) if repository_metadata: # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates. if from_update_manager: @@ -855,19 +855,19 @@ update_to_changeset_hash = None for changeset in repo.changelog: changeset_hash = str( repo.changectx( changeset ) ) - ctx = get_changectx_for_changeset( repo, changeset_hash ) + ctx = suc.get_changectx_for_changeset( repo, changeset_hash ) if update_to_changeset_hash: if changeset_hash == repository.tip( trans.app ): - update_to_ctx = get_changectx_for_changeset( repo, changeset_hash ) + update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash ) latest_changeset_revision = changeset_hash break else: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, - trans.security.encode_id( repository.id ), - changeset_hash ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + trans.security.encode_id( repository.id ), + changeset_hash ) if repository_metadata: # We found a RepositoryMetadata record. - update_to_ctx = get_changectx_for_changeset( repo, changeset_hash ) + update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash ) latest_changeset_revision = changeset_hash break else: @@ -888,7 +888,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, id ) + repository = suc.get_repository_in_tool_shed( trans, id ) metadata = self.get_metadata( trans, id, repository.tip( trans.app ) ) if trans.user and trans.user.email: return trans.fill_template( "/webapps/community/repository/contact_owner.mako", @@ -904,7 +904,7 @@ # Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api. The hg # purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll remove unknown files # and empty directories. This is not currently used because it is not supported in the mercurial API. 
- repo = hg.repository( get_configured_ui(), path=repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), path=repository.repo_path( trans.app ) ) fp = repo.opener( 'hgrc', 'wb' ) fp.write( '[paths]\n' ) fp.write( 'default = .\n' ) @@ -963,7 +963,7 @@ if not os.path.exists( repository_path ): os.makedirs( repository_path ) # Create the local repository - repo = hg.repository( get_configured_ui(), repository_path, create=True ) + repo = hg.repository( suc.get_configured_ui(), repository_path, create=True ) # Add an entry in the hgweb.config file for the local repository. lhs = "repos/%s/%s" % ( repository.user.username, repository.name ) trans.app.hgweb_config_manager.add_entry( lhs, repository_path ) @@ -999,7 +999,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) repository_id = params.get( 'id', None ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) ) repository.deprecated = mark_deprecated trans.sa_session.add( repository ) @@ -1054,7 +1054,7 @@ def download( self, trans, repository_id, changeset_revision, file_type, **kwd ): # Download an archive of the repository files compressed as zip, gz or bz2. params = util.Params( kwd ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) # Allow hgweb to handle the download. This requires the tool shed # server account's .hgrc file to include the following setting: # [web] @@ -1087,7 +1087,7 @@ # The received id is a RepositoryMetadata id, so we have to get the repository id. repository_metadata = get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ): @@ -1172,7 +1172,7 @@ # The received id is a RepositoryMetadata id, so we have to get the repository id. repository_metadata = get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ): @@ -1252,11 +1252,11 @@ name = params.get( 'name', None ) owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) # Default to the received changeset revision and ctx_rev. 
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision ) + update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) ctx_rev = str( update_to_ctx.rev() ) latest_changeset_revision = changeset_revision update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev ) @@ -1264,9 +1264,9 @@ # If changeset_revision is the repository tip, there are no additional updates. return tool_shed_encode( update_dict ) else: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, - trans.security.encode_id( repository.id ), - changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + trans.security.encode_id( repository.id ), + changeset_revision ) if repository_metadata: # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates. return tool_shed_encode( update_dict ) @@ -1276,16 +1276,16 @@ update_to_changeset_hash = None for changeset in repo.changelog: changeset_hash = str( repo.changectx( changeset ) ) - ctx = get_changectx_for_changeset( repo, changeset_hash ) + ctx = suc.get_changectx_for_changeset( repo, changeset_hash ) if update_to_changeset_hash: - if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ): + if suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ): # We found a RepositoryMetadata record. if changeset_hash == repository.tip( trans.app ): # The current ctx is the repository tip, so use it. - update_to_ctx = get_changectx_for_changeset( repo, changeset_hash ) + update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash ) latest_changeset_revision = changeset_hash else: - update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash ) + update_to_ctx = suc.get_changectx_for_changeset( repo, update_to_changeset_hash ) latest_changeset_revision = update_to_changeset_hash break elif not update_to_changeset_hash and changeset_hash == changeset_revision: @@ -1300,10 +1300,10 @@ repository_name = kwd[ 'name' ] repository_owner = kwd[ 'owner' ] changeset_revision = kwd[ 'changeset_revision' ] - repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner ) + repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) - ctx = get_changectx_for_changeset( repo, changeset_revision ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) if ctx: return str( ctx.rev() ) return '' @@ -1312,16 +1312,16 @@ # Avoid caching trans.response.headers['Pragma'] = 'no-cache' trans.response.headers['Expires'] = '0' - return get_repository_file_contents( file_path ) + return suc.get_repository_file_contents( file_path ) def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ): """Return file_name from the received changeset_revision of the repository manifest.""" stripped_file_name = strip_path( file_name ) - repo = hg.repository( get_configured_ui(), repo_files_dir ) - ctx = get_changectx_for_changeset( repo, changeset_revision ) - named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir ) + repo = hg.repository( suc.get_configured_ui(), repo_files_dir ) + ctx = suc.get_changectx_for_changeset( repo, 
changeset_revision ) + named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir ) return named_tmp_file def get_metadata( self, trans, repository_id, changeset_revision ): - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata and repository_metadata.metadata: return repository_metadata.metadata return None @@ -1331,21 +1331,21 @@ name = params.get( 'name', None ) owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repository_id = trans.security.encode_id( repository.id ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata if metadata: # Get a dictionary of all repositories upon which the contents of the received repository depends. - repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None, - circular_repository_dependencies=None ) + repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None, + circular_repository_dependencies=None ) if repository_dependencies: return tool_shed_encode( repository_dependencies ) return '' @@ -1361,9 +1361,9 @@ repo_info_dicts = [] for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ): repository_id, changeset_revision = tup - repository = get_repository_in_tool_shed( trans, repository_id ) - repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) metadata = repository_metadata.metadata if not includes_tools and 'tools' in metadata: includes_tools = True @@ -1372,17 +1372,17 @@ if not includes_tool_dependencies and 'tool_dependencies' in metadata: includes_tool_dependencies = True repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) - ctx = get_changectx_for_changeset( repo, changeset_revision ) - repo_info_dict = create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=changeset_revision, - ctx_rev=str( ctx.rev() ), - repository_owner=repository.user.username, - repository_name=repository.name, - repository=repository, - 
metadata=None, - repository_metadata=repository_metadata ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) + repo_info_dict = suc.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=changeset_revision, + ctx_rev=str( ctx.rev() ), + repository_owner=repository.user.username, + repository_name=repository.name, + repository=repository, + metadata=None, + repository_metadata=repository_metadata ) repo_info_dicts.append( tool_shed_encode( repo_info_dict ) ) return dict( includes_tools=includes_tools, includes_repository_dependencies=includes_repository_dependencies, @@ -1397,9 +1397,9 @@ repository_name = kwd[ 'name' ] repository_owner = kwd[ 'owner' ] changeset_revision = kwd[ 'changeset_revision' ] - repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) - return build_readme_files_dict( repository_metadata ) + repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) + return suc.build_readme_files_dict( repository_metadata ) @web.expose def get_tool_dependencies( self, trans, **kwd ): """Handle a request from a local Galaxy instance.""" @@ -1411,7 +1411,7 @@ name = params.get( 'name', None ) owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) for downloadable_revision in repository.downloadable_revisions: if downloadable_revision.changeset_revision == changeset_revision: break @@ -1432,13 +1432,13 @@ name = kwd[ 'name' ] owner = kwd[ 'owner' ] changeset_revision = kwd[ 'changeset_revision' ] - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) tool_version_dicts = [] for changeset in repo.changelog: current_changeset_revision = str( repo.changectx( changeset ) ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision ) if repository_metadata and repository_metadata.tool_versions: tool_version_dicts.append( repository_metadata.tool_versions ) if current_changeset_revision == changeset_revision: @@ -1450,14 +1450,14 @@ """Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions.""" encoded_id = trans.security.encode_id( repository.id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) # Initialize the tool lineage tool_guid_lineage = [ guid ] # Get all ancestor guids of the received guid. 
current_child_guid = guid - for changeset in reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ): + for changeset in suc.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ): ctx = repo.changectx( changeset ) - rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) ) + rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) ) if rm: parent_guid = rm.tool_versions.get( current_child_guid, None ) if parent_guid: @@ -1465,9 +1465,9 @@ current_child_guid = parent_guid # Get all descendant guids of the received guid. current_parent_guid = guid - for changeset in reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ): + for changeset in suc.reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ): ctx = repo.changectx( changeset ) - rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) ) + rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) ) if rm: tool_versions = rm.tool_versions for child_guid, parent_guid in tool_versions.items(): @@ -1566,15 +1566,15 @@ owner = kwd.get( 'owner', None ) galaxy_url = kwd.get( 'galaxy_url', None ) if not repository_ids: - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repository_ids = trans.security.encode_id( repository.id ) if not galaxy_url: # If galaxy_url is not in the request, it had to have been stored in a cookie by the tool shed. galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' ) # Redirect back to local Galaxy to perform install. 
- url = url_join( galaxy_url, - 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \ - ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) ) + url = suc.url_join( galaxy_url, + 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \ + ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) ) return trans.response.send_redirect( url ) @web.expose def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ): @@ -1586,13 +1586,13 @@ is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) invalid_file_tups = [] if tool: - invalid_file_tups = check_tool_input_params( trans.app, - repository.repo_path( trans.app ), - tool_config, - tool, - [] ) + invalid_file_tups = suc.check_tool_input_params( trans.app, + repository.repo_path( trans.app ), + tool_config, + tool, + [] ) if invalid_file_tups: - message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True ) + message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True ) elif error_message: message = error_message try: @@ -1667,9 +1667,9 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) cntrller = params.get( 'cntrller', 'repository' ) - repository = get_repository_in_tool_shed( trans, id ) + repository = suc.get_repository_in_tool_shed( trans, id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) repo_name = util.restore_text( params.get( 'repo_name', repository.name ) ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) ) description = util.restore_text( params.get( 'description', repository.description ) ) @@ -1787,8 +1787,8 @@ metadata = None is_malicious = False repository_dependencies = None - if changeset_revision != INITIAL_CHANGELOG_HASH: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if changeset_revision != suc.INITIAL_CHANGELOG_HASH: + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: revision_label = get_revision_label( trans, repository, changeset_revision ) repository_metadata_id = trans.security.encode_id( repository_metadata.id ) @@ -1796,9 +1796,9 @@ is_malicious = repository_metadata.malicious else: # There is no repository_metadata defined for the changeset_revision, so see if it was defined in a previous changeset in the changelog. 
- previous_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision ) - if previous_changeset_revision != INITIAL_CHANGELOG_HASH: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision ) + previous_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision ) + if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH: + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision ) if repository_metadata: revision_label = get_revision_label( trans, repository, previous_changeset_revision ) repository_metadata_id = trans.security.encode_id( repository_metadata.id ) @@ -1806,13 +1806,13 @@ is_malicious = repository_metadata.malicious if repository_metadata: # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. - repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) if is_malicious: if trans.app.security_agent.can_push( trans.app, trans.user, repository ): message += malicious_error_can_push @@ -1832,7 +1832,7 @@ review_id = trans.security.encode_id( review.id ) else: review_id = None - containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) + containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) return trans.fill_template( '/webapps/community/repository/manage_repository.mako', cntrller=cntrller, repo_name=repo_name, @@ -1903,28 +1903,28 @@ # Avoid caching trans.response.headers['Pragma'] = 'no-cache' trans.response.headers['Expires'] = '0' - return open_repository_files_folder( trans, folder_path ) + return suc.open_repository_files_folder( trans, folder_path ) @web.expose def preview_tools_in_changeset( self, trans, repository_id, **kwd ): params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: repository_metadata_id = trans.security.encode_id( repository_metadata.id ), metadata = repository_metadata.metadata # Get a dictionary of all 
repositories upon which the contents of the current repository_metadata record depend. - repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) else: repository_metadata_id = None metadata = None @@ -1935,7 +1935,7 @@ selected_value=changeset_revision, add_id_to_name=False, downloadable=False ) - containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) + containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako', repository=repository, containers_dict=containers_dict, @@ -1961,14 +1961,14 @@ name = params.get( 'name', None ) owner = params.get( 'owner', None ) changeset_revision = params.get( 'changeset_revision', None ) - repository = get_repository_by_name_and_owner( trans, name, owner ) + repository = suc.get_repository_by_name_and_owner( trans, name, owner ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) # Get the lower bound changeset revision - lower_bound_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision ) + lower_bound_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision ) # Build the list of changeset revision hashes. changeset_hashes = [] - for changeset in reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ): + for changeset in suc.reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ): changeset_hashes.append( str( repo.changectx( changeset ) ) ) if changeset_hashes: changeset_hashes_str = ','.join( changeset_hashes ) @@ -1987,8 +1987,8 @@ action='browse_repositories', message='Select a repository to rate', status='error' ) ) - repository = get_repository_in_tool_shed( trans, id ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repository = suc.get_repository_in_tool_shed( trans, id ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) if repository.user == trans.user: return trans.response.send_redirect( web.url_for( controller='repository', action='browse_repositories', @@ -2017,10 +2017,10 @@ def reset_all_metadata( self, trans, id, **kwd ): # This method is called only from the ~/templates/webapps/community/repository/manage_repository.mako template. # It resets all metadata on the complete changelog for a single repository in the tool shed. 
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd ) + invalid_file_tups, metadata_dict = suc.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd ) if invalid_file_tups: - repository = get_repository_in_tool_shed( trans, id ) - message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + repository = suc.get_repository_in_tool_shed( trans, id ) + message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) status = 'error' else: message = "All repository metadata has been reset." @@ -2125,9 +2125,9 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) ) - repository = get_repository_in_tool_shed( trans, id ) + repository = suc.get_repository_in_tool_shed( trans, id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) ) if params.get( 'select_files_to_delete_button', False ): if selected_files_to_delete: @@ -2163,9 +2163,9 @@ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message ) handle_email_alerts( trans, repository ) # Update the repository files for browsing. - update_repository( repo ) + suc.update_repository( repo ) # Get the new repository tip. - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) if tip == repository.tip( trans.app ): message += 'No changes to repository. 
' kwd[ 'message' ] = message @@ -2187,7 +2187,7 @@ status=status ) @web.expose def send_to_owner( self, trans, id, message='' ): - repository = get_repository_in_tool_shed( trans, id ) + repository = suc.get_repository_in_tool_shed( trans, id ) if not message: message = 'Enter a message' status = 'error' @@ -2237,7 +2237,7 @@ total_alerts_removed = 0 flush_needed = False for repository_id in repository_ids: - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) if repository.email_alerts: email_alerts = from_json_string( repository.email_alerts ) else: @@ -2268,7 +2268,7 @@ def set_malicious( self, trans, id, ctx_str, **kwd ): malicious = kwd.get( 'malicious', '' ) if kwd.get( 'malicious_button', False ): - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, ctx_str ) malicious_checked = CheckboxField.is_checked( malicious ) repository_metadata.malicious = malicious_checked trans.sa_session.add( repository_metadata ) @@ -2309,12 +2309,12 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, id ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repository = suc.get_repository_in_tool_shed( trans, id ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) changesets = [] for changeset in repo.changelog: ctx = repo.changectx( changeset ) - if get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ): + if suc.get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ): has_metadata = True else: has_metadata = False @@ -2346,9 +2346,9 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, id ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) - ctx = get_changectx_for_changeset( repo, ctx_str ) + repository = suc.get_repository_in_tool_shed( trans, id ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) + ctx = suc.get_changectx_for_changeset( repo, ctx_str ) if ctx is None: message = "Repository does not include changeset revision '%s'." 
% str( ctx_str ) status = 'error' @@ -2362,7 +2362,7 @@ anchors = modified + added + removed + deleted + unknown + ignored + clean diffs = [] for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ): - diffs.append( to_safe_string( diff, to_html=True ) ) + diffs.append( suc.to_safe_string( diff, to_html=True ) ) is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, ctx_str ) return trans.fill_template( '/webapps/community/repository/view_changeset.mako', @@ -2383,7 +2383,7 @@ status=status ) @web.expose def view_or_manage_repository( self, trans, **kwd ): - repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) + repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] ) if trans.user_is_admin() or repository.user == trans.user: return trans.response.send_redirect( web.url_for( controller='repository', action='manage_repository', @@ -2398,8 +2398,8 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) cntrller = params.get( 'cntrller', 'repository' ) - repository = get_repository_in_tool_shed( trans, id ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repository = suc.get_repository_in_tool_shed( trans, id ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) ) display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) ) @@ -2434,18 +2434,18 @@ add_id_to_name=False, downloadable=False ) revision_label = get_revision_label( trans, repository, changeset_revision ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: repository_metadata_id = trans.security.encode_id( repository_metadata.id ) metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. 
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) else: repository_metadata_id = None metadata = None @@ -2466,7 +2466,7 @@ review_id = trans.security.encode_id( review.id ) else: review_id = None - containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) + containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ) return trans.fill_template( '/webapps/community/repository/view_repository.mako', cntrller=cntrller, repo=repo, @@ -2491,16 +2491,16 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_files_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_files_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_files_dir ) tool_metadata_dict = {} tool_lineage = [] tool = None guid = None original_tool_data_path = trans.app.config.tool_data_path revision_label = get_revision_label( trans, repository, changeset_revision ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata if metadata: @@ -2515,18 +2515,18 @@ can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision ) if can_use_disk_file: trans.app.config.tool_data_path = work_dir - tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, - repo_files_dir, - full_path_to_tool_config, - work_dir ) + tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans, + repo_files_dir, + full_path_to_tool_config, + work_dir ) if message: status = 'error' else: - tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, - repo, - changeset_revision, - tool_config_filename, - work_dir ) + tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans, + repo, + changeset_revision, + tool_config_filename, + work_dir ) if message: status = 'error' break diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository_review.py --- a/lib/galaxy/webapps/community/controllers/repository_review.py +++ b/lib/galaxy/webapps/community/controllers/repository_review.py @@ -4,12 +4,12 @@ from galaxy.web.form_builder import SelectField, CheckboxField from galaxy.webapps.community import model from 
galaxy.web.framework.helpers import time_ago, iff, grids -from galaxy.model.orm import * +from galaxy.model.orm import and_ from sqlalchemy.sql.expression import func from common import * from galaxy.webapps.community.util.container_util import STRSEP from repository import RepositoryGrid -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from galaxy.util.odict import odict from galaxy import eggs @@ -56,7 +56,7 @@ # Restrict to revisions that have been reviewed. if repository.reviews: rval = '' - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) for review in repository.reviews: changeset_revision = review.changeset_revision rev, label = get_rev_label_from_changeset_revision( repo, changeset_revision ) @@ -311,7 +311,7 @@ status = params.get( 'status', 'done' ) review = get_review( trans, kwd[ 'id' ] ) repository = review.repository - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/browse_review.mako', repository=repository, @@ -384,7 +384,7 @@ message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name ) status = "error" else: - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) # See if there are any reviews for previous changeset revisions that the user can copy. if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ): return trans.response.send_redirect( web.url_for( controller='repository_review', @@ -392,7 +392,7 @@ **kwd ) ) # A review can be initially performed only on an installable revision of a repository, so make sure we have metadata associated # with the received changeset_revision. 
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata if metadata: @@ -470,7 +470,7 @@ for component in get_components( trans ): components_dict[ component.name ] = dict( component=component, component_review=None ) repository = review.repository - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) for component_review in review.component_reviews: if component_review and component_review.component: component_name = component_review.component.name @@ -653,9 +653,9 @@ status = params.get( 'status', 'done' ) repository_id = kwd.get( 'id', None ) if repository_id: - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] reviews_dict = odict() @@ -669,7 +669,7 @@ repository_reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) # Determine if the current user can add a review to this revision. can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ] - repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: repository_metadata_reviews = util.listify( repository_metadata.reviews ) else: @@ -700,9 +700,9 @@ status = params.get( 'status', 'done' ) repository_id = kwd.get( 'id', None ) changeset_revision = kwd.get( 'changeset_revision', None ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision ) reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) @@ -765,9 +765,9 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) + repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] ) changeset_revision = kwd.get( 'changeset_revision', None ) - repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision ) rev, changeset_revision_label = 
get_rev_label_from_changeset_revision( repo, changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako', @@ -780,7 +780,7 @@ @web.expose @web.require_login( "view or manage repository" ) def view_or_manage_repository( self, trans, **kwd ): - repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) + repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] ) if trans.user_is_admin() or repository.user == trans.user: return trans.response.send_redirect( web.url_for( controller='repository', action='manage_repository', diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/upload.py --- a/lib/galaxy/webapps/community/controllers/upload.py +++ b/lib/galaxy/webapps/community/controllers/upload.py @@ -3,7 +3,7 @@ from galaxy.model.orm import * from galaxy.datatypes.checkers import * from common import * -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from galaxy import eggs eggs.require('mercurial') @@ -28,9 +28,9 @@ category_ids = util.listify( params.get( 'category_id', '' ) ) categories = get_categories( trans ) repository_id = params.get( 'repository_id', '' ) - repository = get_repository_in_tool_shed( trans, repository_id ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) ) remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) ) uploaded_file = None @@ -53,7 +53,7 @@ uploaded_directory = tempfile.mkdtemp() repo_url = 'http%s' % url[ len( 'hg' ): ] repo_url = repo_url.encode( 'ascii', 'replace' ) - commands.clone( get_configured_ui(), repo_url, uploaded_directory ) + commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory ) elif url: valid_url = True try: @@ -146,7 +146,7 @@ if full_path.endswith( 'tool_data_table_conf.xml.sample' ): # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries # to the in-memory trans.app.tool_data_tables dictionary. - error, error_message = handle_sample_tool_data_table_conf_file( trans.app, full_path ) + error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path ) if error: message = '%s<br/>%s' % ( message, error_message ) # See if the content of the change set was valid. @@ -154,7 +154,7 @@ handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) if ok: # Update the repository files for browsing. - update_repository( repo ) + suc.update_repository( repo ) # Get the new repository tip. if tip == repository.tip( trans.app ): message = 'No changes to repository. ' @@ -181,8 +181,8 @@ message += " %d files were removed from the repository root. " % len( files_to_remove ) kwd[ 'message' ] = message set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd ) - #provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. 
a requirement tag mismatch - if get_config_from_disk( 'tool_dependencies.xml', repo_dir ): + # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch + if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ): if repository.metadata_revisions: metadata_dict = repository.metadata_revisions[0].metadata else: @@ -192,7 +192,7 @@ status = 'warning' log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) ) # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) + suc.reset_tool_data_tables( trans.app ) trans.response.send_redirect( web.url_for( controller='repository', action='browse_repository', id=repository_id, @@ -202,7 +202,7 @@ else: status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) + suc.reset_tool_data_tables( trans.app ) selected_categories = [ trans.security.decode_id( id ) for id in category_ids ] return trans.fill_template( '/webapps/community/repository/upload.mako', repository=repository, @@ -214,7 +214,7 @@ status=status ) def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ): repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) undesirable_dirs_removed = 0 undesirable_files_removed = 0 if upload_point is not None: @@ -250,7 +250,7 @@ def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ): # Upload a tar archive of files. repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) undesirable_dirs_removed = 0 undesirable_files_removed = 0 ok, message = self.__check_archive( tar ) @@ -283,7 +283,7 @@ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed) def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ): repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_dir ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) content_alert_str = '' files_to_remove = [] filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ] @@ -339,7 +339,7 @@ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ): # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries # to the in-memory trans.app.tool_data_tables dictionary. 
- error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive ) + error, message = suc.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive ) if error: return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/workflow.py --- a/lib/galaxy/webapps/community/controllers/workflow.py +++ b/lib/galaxy/webapps/community/controllers/workflow.py @@ -10,7 +10,7 @@ from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps from galaxy.model.orm import * from common import * -from galaxy.util.shed_util_common import * +import galaxy.util.shed_util_common as suc from galaxy.tool_shed.encoding_util import * class RepoInputDataModule( InputDataModule ): @@ -145,7 +145,7 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id ) - repository = get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) ) + repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) ) return trans.fill_template( "/webapps/community/repository/view_workflow.mako", repository=repository, changeset_revision=repository_metadata.changeset_revision, diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -2,6 +2,7 @@ from admin import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * +import galaxy.util.shed_util_common as suc from galaxy.tool_shed.encoding_util import * from galaxy import eggs, tools @@ -423,7 +424,7 @@ def browse_tool_shed( self, trans, **kwd ): tool_shed_url = kwd[ 'tool_shed_url' ] galaxy_url = url_for( '/', qualified=True ) - url = url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) ) + url = suc.url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) ) return trans.response.send_redirect( url ) @web.expose @web.require_admin @@ -439,10 +440,10 @@ def check_for_updates( self, trans, **kwd ): # Send a request to the relevant tool shed to see if there are any updates. 
repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - url = url_join( tool_shed_url, - 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ - ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, + 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ + ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) ) return trans.response.send_redirect( url ) @web.expose @web.require_admin @@ -531,14 +532,14 @@ def find_tools_in_tool_shed( self, trans, **kwd ): tool_shed_url = kwd[ 'tool_shed_url' ] galaxy_url = url_for( '/', qualified=True ) - url = url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url ) + url = suc.url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url ) return trans.response.send_redirect( url ) @web.expose @web.require_admin def find_workflows_in_tool_shed( self, trans, **kwd ): tool_shed_url = kwd[ 'tool_shed_url' ] galaxy_url = url_for( '/', qualified=True ) - url = url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url ) + url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url ) return trans.response.send_redirect( url ) def generate_tool_path( self, repository_clone_url, changeset_revision ): """ @@ -547,20 +548,20 @@ <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision> http://test@bx.psu.edu:9009/repos/test/filter """ - tmp_url = clean_repository_clone_url( repository_clone_url ) + tmp_url = suc.clean_repository_clone_url( repository_clone_url ) # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column items = tmp_url.split( 'repos' ) tool_shed_url = items[ 0 ] repo_path = items[ 1 ] tool_shed_url = clean_tool_shed_url( tool_shed_url ) - return url_join( tool_shed_url, 'repos', repo_path, changeset_revision ) + return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision ) @web.json @web.require_admin def get_file_contents( self, trans, file_path ): # Avoid caching trans.response.headers['Pragma'] = 'no-cache' trans.response.headers['Expires'] = '0' - return get_repository_file_contents( file_path ) + return suc.get_repository_file_contents( file_path ) @web.expose @web.require_admin def get_repository_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ): @@ -570,10 +571,10 @@ need it so that we can derive the tool shed from which it was installed. 
""" repository = get_installed_tool_shed_repository( trans, repository_id ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - url = url_join( tool_shed_url, - 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository_name, repository_owner, changeset_revision ) ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, + 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \ + ( repository_name, repository_owner, changeset_revision ) ) response = urllib2.urlopen( url ) raw_text = response.read() response.close() @@ -612,7 +613,7 @@ message = '' tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository # Get the tool_dependencies.xml file from the repository. - tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) ) + tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) ) installed_tool_dependencies = handle_tool_dependencies( app=trans.app, tool_shed_repository=tool_shed_repository, tool_dependencies_config=tool_dependencies_config, @@ -697,15 +698,15 @@ clone_dir = os.path.join( tool_path, relative_clone_dir ) relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name ) install_dir = os.path.join( tool_path, relative_install_dir ) - cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) + cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) if cloned_ok: if reinstalling: # Since we're reinstalling the repository we need to find the latest changeset revision to which is can be updated. current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository ) if current_ctx_rev != ctx_rev: - repo = hg.repository( get_configured_ui(), path=os.path.abspath( install_dir ) ) + repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) ) pull_repository( repo, repository_clone_url, current_changeset_revision ) - update_repository( repo, ctx_rev=current_ctx_rev ) + suc.update_repository( repo, ctx_rev=current_ctx_rev ) self.handle_repository_contents( trans, tool_shed_repository=tool_shed_repository, tool_path=tool_path, @@ -722,10 +723,10 @@ update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) - url = url_join( tool_shed_url, - '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ - ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) + url = suc.url_join( tool_shed_url, + '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ + ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) ) response = urllib2.urlopen( url ) text = response.read() response.close() @@ -744,7 +745,7 @@ tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. 
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir ) + tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir ) installed_tool_dependencies = handle_tool_dependencies( app=trans.app, tool_shed_repository=tool_shed_repository, tool_dependencies_config=tool_dependencies_config, @@ -774,15 +775,15 @@ when an admin is installing a new repository or reinstalling an uninstalled repository. """ shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf ) - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=tool_shed_repository, - repository_clone_url=repository_clone_url, - shed_config_dict=shed_config_dict, - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=True ) + metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app, + repository=tool_shed_repository, + repository_clone_url=repository_clone_url, + shed_config_dict=shed_config_dict, + relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=True ) tool_shed_repository.metadata = metadata_dict trans.sa_session.add( tool_shed_repository ) trans.sa_session.flush() @@ -824,7 +825,7 @@ files_dir = relative_install_dir if shed_config_dict.get( 'tool_path' ): files_dir = os.path.join( shed_config_dict['tool_path'], files_dir ) - datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir ) + datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir ) # Load data types required by tools. converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False ) if converter_path or display_path: @@ -859,13 +860,10 @@ **kwd ) ) if repository.can_install and operation == 'install': # Send a request to the tool shed to install the repository. 
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - url = url_join( tool_shed_url, - 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \ - ( repository.name, - repository.owner, - repository.installed_changeset_revision, - ( url_for( '/', qualified=True ) ) ) ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, + 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \ + ( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) ) return trans.response.send_redirect( url ) description = util.restore_text( params.get( 'description', repository.description ) ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) @@ -1049,7 +1047,7 @@ # Avoid caching trans.response.headers['Pragma'] = 'no-cache' trans.response.headers['Expires'] = '0' - return open_repository_files_folder( trans, folder_path ) + return suc.open_repository_files_folder( trans, folder_path ) @web.expose @web.require_admin def prepare_for_install( self, trans, **kwd ): @@ -1081,9 +1079,9 @@ repository_ids = kwd.get( 'repository_ids', None ) changeset_revisions = kwd.get( 'changeset_revisions', None ) # Get the information necessary to install each repository. - url = url_join( tool_shed_url, - 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \ - ( repository_ids, changeset_revisions ) ) + url = suc.url_join( tool_shed_url, + 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \ + ( repository_ids, changeset_revisions ) ) response = urllib2.urlopen( url ) raw_text = response.read() response.close() @@ -1240,21 +1238,21 @@ repository_dependencies = None elif len( repo_info_tuple ) == 7: description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple - url = url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ - ( name, repository_owner, changeset_revision ) ) + url = suc.url_join( tool_shed_url, + 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ + ( name, repository_owner, changeset_revision ) ) response = urllib2.urlopen( url ) raw_text = response.read() response.close() readme_files_dict = from_json_string( raw_text ) - containers_dict = build_repository_containers_for_galaxy( trans=trans, - toolshed_base_url=tool_shed_url, - repository_name=name, - repository_owner=repository_owner, - changeset_revision=changeset_revision, - readme_files_dict=readme_files_dict, - repository_dependencies=repository_dependencies, - tool_dependencies=tool_dependencies ) + containers_dict = suc.build_repository_containers_for_galaxy( trans=trans, + toolshed_base_url=tool_shed_url, + repository_name=name, + repository_owner=repository_owner, + changeset_revision=changeset_revision, + readme_files_dict=readme_files_dict, + repository_dependencies=repository_dependencies, + tool_dependencies=tool_dependencies ) else: containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None ) # Handle tool dependencies chack box. 
@@ -1300,7 +1298,7 @@ repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository ) clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) tool_section = None tool_panel_section_key = None metadata = tool_shed_repository.metadata @@ -1377,17 +1375,17 @@ repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, changeset_revision=tool_shed_repository.installed_changeset_revision ) - repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) ) - repo_info_dict = create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=tool_shed_repository.installed_changeset_revision, - ctx_rev=ctx_rev, - repository_owner=tool_shed_repository.owner, - repository_name=tool_shed_repository.name, - repository=None, - repository_metadata=None, - metadata=metadata, - repository_dependencies=repository_dependencies ) + repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) ) + repo_info_dict = suc.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=tool_shed_repository.installed_changeset_revision, + ctx_rev=ctx_rev, + repository_owner=tool_shed_repository.owner, + repository_name=tool_shed_repository.name, + repository=None, + repository_metadata=None, + metadata=metadata, + repository_dependencies=repository_dependencies ) repo_info_dict = tool_shed_encode( repo_info_dict ) new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies, includes_tools=tool_shed_repository.includes_tools, @@ -1439,12 +1437,12 @@ repository_dependencies = None elif len( repo_info_tuple ) == 7: description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple - tool_shed = get_tool_shed_from_clone_url( repository_clone_url ) + tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset # revision to see if it was previously installed using one of them. 
- url = url_join( tool_shed_url, - 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ - ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) ) + url = suc.url_join( tool_shed_url, + 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ + ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) ) response = urllib2.urlopen( url ) text = response.read() response.close() @@ -1466,7 +1464,7 @@ repository_id = kwd[ 'id' ] tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id ) metadata = tool_shed_repository.metadata - tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository ) repository_dependencies = self.get_repository_dependencies( trans=trans, @@ -1474,16 +1472,16 @@ repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, changeset_revision=tool_shed_repository.installed_changeset_revision ) - repo_info_dict = create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=tool_shed_repository.installed_changeset_revision, - ctx_rev=ctx_rev, - repository_owner=tool_shed_repository.owner, - repository_name=tool_shed_repository.name, - repository=None, - repository_metadata=None, - metadata=metadata, - repository_dependencies=repository_dependencies ) + repo_info_dict = suc.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=tool_shed_repository.installed_changeset_revision, + ctx_rev=ctx_rev, + repository_owner=tool_shed_repository.owner, + repository_name=tool_shed_repository.name, + repository=None, + repository_metadata=None, + metadata=metadata, + repository_dependencies=repository_dependencies ) # Get the location in the tool panel in which the tool was originally loaded. if 'tool_panel_section' in metadata: tool_panel_dict = metadata[ 'tool_panel_section' ] @@ -1513,21 +1511,21 @@ message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section." 
status = 'warning' if metadata and 'readme_files' in metadata: - url = url_join( tool_shed_url, - 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ - ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) ) + url = suc.url_join( tool_shed_url, + 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \ + ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) ) response = urllib2.urlopen( url ) raw_text = response.read() response.close() readme_files_dict = from_json_string( raw_text ) - containers_dict = build_repository_containers_for_galaxy( trans=trans, - toolshed_base_url=tool_shed_url, - repository_name=name, - repository_owner=repository_owner, - changeset_revision=changeset_revision, - readme_files_dict=readme_files_dict, - repository_dependencies=repository_dependencies, - tool_dependencies=tool_dependencies ) + containers_dict = suc.build_repository_containers_for_galaxy( trans=trans, + toolshed_base_url=tool_shed_url, + repository_name=name, + repository_owner=repository_owner, + changeset_revision=changeset_revision, + readme_files_dict=readme_files_dict, + repository_dependencies=repository_dependencies, + tool_dependencies=tool_dependencies ) else: containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None ) # Handle repository dependencies check box. @@ -1558,12 +1556,12 @@ @web.require_admin def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ): if 'reset_metadata_on_selected_repositories_button' in kwd: - kwd[ 'CONTROLLER' ] = GALAXY_ADMIN_TOOL_SHED_CONTROLLER - message, status = reset_metadata_on_selected_repositories( trans, **kwd ) + kwd[ 'CONTROLLER' ] = suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER + message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd ) else: message = util.restore_text( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) - repositories_select_field = build_repository_ids_select_field( trans, GALAXY_ADMIN_TOOL_SHED_CONTROLLER ) + repositories_select_field = suc.build_repository_ids_select_field( trans, suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER ) return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako', repositories_select_field=repositories_select_field, message=message, @@ -1573,20 +1571,20 @@ def reset_repository_metadata( self, trans, id ): """Reset all metadata on a single installed tool shed repository.""" repository = get_installed_tool_shed_repository( trans, id ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) if relative_install_dir: original_metadata_dict = repository.metadata - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - shed_config_dict = repository.get_shed_config_dict( trans.app ), - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False ) + metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app, + 
repository=repository, + repository_clone_url=repository_clone_url, + shed_config_dict = repository.get_shed_config_dict( trans.app ), + relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False ) repository.metadata = metadata_dict if metadata_dict != original_metadata_dict: update_in_shed_tool_config( trans.app, repository ) @@ -1645,10 +1643,10 @@ def set_tool_versions( self, trans, **kwd ): # Get the tool_versions from the tool shed for each tool in the installed change set. repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - url = url_join( tool_shed_url, - 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ - ( repository.name, repository.owner, repository.changeset_revision ) ) + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, + 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ + ( repository.name, repository.owner, repository.changeset_revision ) ) response = urllib2.urlopen( url ) text = response.read() response.close() @@ -1757,21 +1755,21 @@ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) ) else: repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) ) - repo = hg.repository( get_configured_ui(), path=repo_files_dir ) + repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir ) repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) pull_repository( repo, repository_clone_url, latest_ctx_rev ) - update_repository( repo, latest_ctx_rev ) + suc.update_repository( repo, latest_ctx_rev ) tool_shed = clean_tool_shed_url( tool_shed_url ) # Update the repository metadata. - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - shed_config_dict = repository.get_shed_config_dict( trans.app ), - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=True, - persist=True ) + metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + shed_config_dict = repository.get_shed_config_dict( trans.app ), + relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=True, + persist=True ) repository.metadata = metadata_dict # Update the repository changeset_revision in the database. repository.changeset_revision = latest_changeset_revision Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
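For readers skimming the hunks above, the recurring change is the same in every affected file: a wildcard import of shed_util_common (and, in places, encoding_util and model.orm) is replaced by a single namespaced import such as `import galaxy.util.shed_util_common as suc`, and each call previously pulled into the module namespace is qualified with the `suc.` prefix. A minimal, self-contained sketch of that style, using the standard-library os.path module purely as a stand-in for shed_util_common (the alias name "suc" mirrors the diff; nothing else here is taken from the Galaxy code):

```python
# Old style (shown as comments): a wildcard import hides where names come from,
# so a reader cannot tell locally defined helpers from imported ones.
#   from os.path import *
#   print(join('repos', 'test', 'filter'))

# New style, as applied throughout this changeset: import the module once
# under a short alias and qualify every call site with it.
import os.path as suc

print(suc.join('repos', 'test', 'filter'))  # the helper's origin is explicit at the call site
```

The same mechanical substitution accounts for nearly every hunk in the changeset; qualifying each helper with the alias makes its defining module visible at the call site instead of relying on whatever a `*` import happened to export.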