commit/galaxy-central: greg: Refactor code supporting setting metadata on repositories in the tool shed and installed in Galaxy - enables using same code across web apps.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/50b1d7a65bd2/ changeset: 50b1d7a65bd2 user: greg date: 2012-10-26 21:39:50 summary: Refactor code supporting setting metadata on repositories in the tool shed and installed in Galaxy - enables using same code across web apps. affected #: 14 files diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -3,9 +3,12 @@ from datetime import date, datetime, timedelta from time import strftime, gmtime from galaxy import util +from galaxy.web import url_for +from galaxy.web.form_builder import SelectField from galaxy.tools import parameters from galaxy.datatypes.checkers import * from galaxy.util.json import * +from galaxy.util import inflector from galaxy.tools.search import ToolBoxSearch from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment from galaxy.tool_shed.encoding_util import * @@ -23,6 +26,7 @@ log = logging.getLogger( __name__ ) +GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER' INITIAL_CHANGELOG_HASH = '000000000000' # Characters that must be html escaped MAPPED_CHARS = { '>' :'>', @@ -33,6 +37,7 @@ MAX_CONTENT_SIZE = 32768 NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ] VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" ) +TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER' class ShedCounter( object ): def __init__( self, model ): @@ -246,6 +251,27 @@ except: pass return converter_path, display_path +def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ): + """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata.""" + repositories_select_field = SelectField( 
name=name, multiple=multiple, display=display ) + if cntrller == TOOL_SHED_ADMIN_CONTROLLER: + for repository in trans.sa_session.query( trans.model.Repository ) \ + .filter( trans.model.Repository.table.c.deleted == False ) \ + .order_by( trans.model.Repository.table.c.name, + trans.model.Repository.table.c.user_id ): + owner = repository.user.username + option_label = '%s (%s)' % ( repository.name, owner ) + option_value = '%s' % trans.security.encode_id( repository.id ) + repositories_select_field.add_option( option_label, option_value ) + elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER: + for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \ + .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \ + .order_by( trans.model.ToolShedRepository.table.c.name, + trans.model.ToolShedRepository.table.c.owner ): + option_label = '%s (%s)' % ( repository.name, repository.owner ) + option_value = trans.security.encode_id( repository.id ) + repositories_select_field.add_option( option_label, option_value ) + return repositories_select_field def can_generate_tool_dependency_metadata( root, metadata_dict ): """ Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml @@ -339,6 +365,105 @@ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) return invalid_files_and_errors_tups +def clean_repository_metadata( trans, id, changeset_revisions ): + # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. + # We sometimes see multiple records with the same changeset revision value - no idea how this happens. 
We'll assume we can delete the older + # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later.. + changeset_revisions_checked = [] + for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ + .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision, + trans.model.RepositoryMetadata.table.c.update_time.desc() ): + changeset_revision = repository_metadata.changeset_revision + can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions + if can_delete: + trans.sa_session.delete( repository_metadata ) + trans.sa_session.flush() +def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): + # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of + # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only + # when this method returns the string 'not equal and not subset'. 
+ ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) + ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) + ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ] + ancestor_guids.sort() + ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] ) + ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] ) + current_datatypes = current_metadata_dict.get( 'datatypes', [] ) + current_tools = current_metadata_dict.get( 'tools', [] ) + current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ] + current_guids.sort() + current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] ) + current_workflows = current_metadata_dict.get( 'workflows', [] ) + # Handle case where no metadata exists for either changeset. + if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: + return 'no metadata' + workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) + datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) + # Handle case where all metadata is the same. + if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': + return 'equal' + if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]: + is_subset = True + for guid in ancestor_guids: + if guid not in current_guids: + is_subset = False + break + if is_subset: + return 'subset' + return 'not equal and not subset' +def compare_datatypes( ancestor_datatypes, current_datatypes ): + # Determine if ancestor_datatypes is the same as current_datatypes + # or if ancestor_datatypes is a subset of current_datatypes. 
Each + # datatype dict looks something like: + # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} + if len( ancestor_datatypes ) <= len( current_datatypes ): + for ancestor_datatype in ancestor_datatypes: + # Currently the only way to differentiate datatypes is by name. + ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] + ancestor_datatype_extension = ancestor_datatype[ 'extension' ] + ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) + found_in_current = False + for current_datatype in current_datatypes: + if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ + current_datatype[ 'extension' ] == ancestor_datatype_extension and \ + current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_datatypes ) == len( current_datatypes ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' +def compare_workflows( ancestor_workflows, current_workflows ): + # Determine if ancestor_workflows is the same as current_workflows + # or if ancestor_workflows is a subset of current_workflows. + if len( ancestor_workflows ) <= len( current_workflows ): + for ancestor_workflow_tup in ancestor_workflows: + # ancestor_workflows is a list of tuples where each contained tuple is + # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] + ancestor_workflow_dict = ancestor_workflow_tup[1] + # Currently the only way to differentiate workflows is by name. + ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] + num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) + found_in_current = False + for current_workflow_tup in current_workflows: + current_workflow_dict = current_workflow_tup[1] + # Assume that if the name and number of steps are euqal, + # then the workflows are the same. 
Of course, this may + # not be true... + if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_workflows ) == len( current_workflows ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' def concat_messages( msg1, msg2 ): if msg1: if msg2: @@ -447,6 +572,20 @@ tool_dicts=tool_dicts, converter_path=converter_path, display_path=display_path ) +def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): + downloadable = is_downloadable( metadata_dict ) + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + repository_metadata.metadata = metadata_dict + repository_metadata.downloadable = downloadable + else: + repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id, + changeset_revision=changeset_revision, + metadata=metadata_dict, + downloadable=downloadable ) + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + return repository_metadata def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, status, current_changeset_revision=None, owner='', dist_to_shed=False ): # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used @@ -546,10 +685,19 @@ set_status=set_status ) tool_dependency_objects.append( tool_dependency ) return tool_dependency_objects -def generate_clone_url( trans, repository ): - """Generate the URL for cloning a repository.""" +def generate_clone_url_for_installed_repository( trans, repository ): + """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" 
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) return url_join( tool_shed_url, 'repos', repository.owner, repository.name ) +def generate_clone_url_for_repository_in_tool_shed( trans, repository ): + """Generate the URL for cloning a repository that is in the tool shed.""" + base_url = url_for( '/', qualified=True ).rstrip( '/' ) + if trans.user: + protocol, base = base_url.split( '://' ) + username = '%s@' % trans.user.username + return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) + else: + return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) def generate_datatypes_metadata( datatypes_config, metadata_dict ): """Update the received metadata_dict with information from the parsed datatypes_config.""" tree = ElementTree.parse( datatypes_config ) @@ -750,6 +898,42 @@ app.config.tool_data_path = original_tool_data_path app.config.tool_data_table_config_path = original_tool_data_table_config_path return metadata_dict, invalid_file_tups +def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ): + if as_html: + new_line = '<br/>' + bold_start = '<b>' + bold_end = '</b>' + else: + new_line = '\n' + bold_start = '' + bold_end = '' + message = '' + if not displaying_invalid_tool: + if metadata_dict: + message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip ) + message += "Correct the following problems if necessary and reset metadata.%s" % new_line + else: + message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip ) + message += "installed into a local Galaxy instance. 
Correct the following problems and reset metadata.%s" % new_line + for itc_tup in invalid_file_tups: + tool_file, exception_msg = itc_tup + if exception_msg.find( 'No such file or directory' ) >= 0: + exception_items = exception_msg.split() + missing_file_items = exception_items[ 7 ].split( '/' ) + missing_file = missing_file_items[ -1 ].rstrip( '\'' ) + if missing_file.endswith( '.loc' ): + sample_ext = '%s.sample' % missing_file + else: + sample_ext = missing_file + correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end ) + correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end ) + else: + if as_html: + correction_msg = exception_msg + else: + correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end ) + message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line ) + return message def generate_package_dependency_metadata( elem, tool_dependencies_dict ): """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set.""" requirements_dict = {} @@ -1158,6 +1342,9 @@ ctx = get_changectx_for_changeset( repo, changeset_revision ) named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir ) return named_tmp_file +def get_installed_tool_shed_repository( trans, id ): + """Get a repository on the Galaxy side from the database via id""" + return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) ) def get_list_of_copied_sample_files( repo, ctx, dir ): """ Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. 
Copy @@ -1211,6 +1398,24 @@ fh.close() return tmp_filename return None +def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ): + parent_id = None + # Compare from most recent to oldest. + changeset_revisions.reverse() + for changeset_revision in changeset_revisions: + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + metadata = repository_metadata.metadata + tools_dicts = metadata.get( 'tools', [] ) + for tool_dict in tools_dicts: + if tool_dict[ 'guid' ] == guid: + # The tool has not changed between the compared changeset revisions. + continue + if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version: + # The tool version is different, so we've found the parent. + return tool_dict[ 'guid' ] + if parent_id is None: + # The tool did not change through all of the changeset revisions. + return old_id def get_repository_file_contents( file_path ): if is_gzip( file_path ): to_html = to_html_str( '\ngzip compressed file\n' ) @@ -1242,6 +1447,9 @@ if contents: contents.sort() return contents +def get_repository_in_tool_shed( trans, id ): + """Get a repository on the tool shed side from the database via id""" + return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) ) def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ): """Get metadata for a specified repository change set from the database""" # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. 
Duplicate records were somehow @@ -1635,6 +1843,8 @@ parent_id=tool_version_using_parent_id.id ) sa_session.add( tool_version_association ) sa_session.flush() +def is_downloadable( metadata_dict ): + return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ): # Load or deactivate proprietary datatype converters app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate ) @@ -1739,6 +1949,12 @@ repo, source=repository_clone_url, rev=[ ctx_rev ] ) +def remove_dir( dir ): + if os.path.exists( dir ): + try: + shutil.rmtree( dir ) + except: + pass def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ): # A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list # of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have @@ -1912,6 +2128,188 @@ removed = True error_message = '' return removed, error_message +def reset_all_metadata_on_installed_repository( trans, id ): + """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" + repository = get_installed_tool_shed_repository( trans, id ) + tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) + tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) + if relative_install_dir: + original_metadata_dict = repository.metadata + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + shed_config_dict = repository.get_shed_config_dict( trans.app ), + 
relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False ) + repository.metadata = metadata_dict + if metadata_dict != original_metadata_dict: + update_in_shed_tool_config( trans.app, repository ) + trans.sa_session.add( repository ) + trans.sa_session.flush() + log.debug( 'Metadata has been reset on repository %s.' % repository.name ) + else: + log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name ) + else: + log.debug( 'Error locating installation directory for repository %s.' % repository.name ) + return invalid_file_tups, metadata_dict +def reset_all_metadata_on_repository_in_tool_shed( trans, id ): + """Reset all metadata on a single repository in a tool shed.""" + def reset_all_tool_versions( trans, id, repo ): + changeset_revisions = [] + for changeset in repo.changelog: + changeset_revision = str( repo.changectx( changeset ) ) + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if metadata.get( 'tools', None ): + changeset_revisions.append( changeset_revision ) + # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools. + # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. + for index, changeset_revision in enumerate( changeset_revisions ): + tool_versions_dict = {} + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + metadata = repository_metadata.metadata + tool_dicts = metadata[ 'tools' ] + if index == 0: + # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools. 
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config. + for tool_dict in tool_dicts: + tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ] + else: + for tool_dict in tool_dicts: + parent_id = get_parent_id( trans, + id, + tool_dict[ 'id' ], + tool_dict[ 'version' ], + tool_dict[ 'guid' ], + changeset_revisions[ 0:index ] ) + tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id + if tool_versions_dict: + repository_metadata.tool_versions = tool_versions_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + repository = get_repository_in_tool_shed( trans, id ) + log.debug( "Resetting all metadata on repository: %s" % repository.name ) + repo_dir = repository.repo_path + repo = hg.repository( get_configured_ui(), repo_dir ) + repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) + # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop + # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list. + changeset_revisions = [] + # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict. 
+ metadata_changeset_revision = None + metadata_dict = None + ancestor_changeset_revision = None + ancestor_metadata_dict = None + invalid_file_tups = [] + home_dir = os.getcwd() + for changeset in repo.changelog: + work_dir = tempfile.mkdtemp() + current_changeset_revision = str( repo.changectx( changeset ) ) + ctx = repo.changectx( changeset ) + log.debug( "Cloning repository revision: %s", str( ctx.rev() ) ) + cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) ) + if cloned_ok: + log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) ) + current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + relative_install_dir=repo_dir, + repository_files_dir=work_dir, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False ) + if current_metadata_dict: + if not metadata_changeset_revision and not metadata_dict: + # We're at the first change set in the change log. + metadata_changeset_revision = current_changeset_revision + metadata_dict = current_metadata_dict + if ancestor_changeset_revision: + # Compare metadata from ancestor and current. The value of comparison will be one of: + # 'no metadata' - no metadata for either ancestor or current, so continue from current + # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current + # 'subset' - ancestor metadata is a subset of current metadata, so continue from current + # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata. 
+ comparison = compare_changeset_revisions( ancestor_changeset_revision, + ancestor_metadata_dict, + current_changeset_revision, + current_metadata_dict ) + if comparison in [ 'no metadata', 'equal', 'subset' ]: + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + elif comparison == 'not equal and not subset': + metadata_changeset_revision = ancestor_changeset_revision + metadata_dict = ancestor_metadata_dict + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + changeset_revisions.append( metadata_changeset_revision ) + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + else: + # We're at the beginning of the change log. + ancestor_changeset_revision = current_changeset_revision + ancestor_metadata_dict = current_metadata_dict + if not ctx.children(): + metadata_changeset_revision = current_changeset_revision + metadata_dict = current_metadata_dict + # We're at the end of the change log. + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + changeset_revisions.append( metadata_changeset_revision ) + ancestor_changeset_revision = None + ancestor_metadata_dict = None + elif ancestor_metadata_dict: + # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not. + if not ctx.children(): + # We're at the end of the change log. + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + changeset_revisions.append( metadata_changeset_revision ) + ancestor_changeset_revision = None + ancestor_metadata_dict = None + remove_dir( work_dir ) + # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions. 
+ clean_repository_metadata( trans, id, changeset_revisions ) + # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog. + reset_all_tool_versions( trans, id, repo ) + # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. + reset_tool_data_tables( trans.app ) + return invalid_file_tups, metadata_dict +def reset_metadata_on_selected_repositories( trans, **kwd ): + # This method is called from both Galaxy and the Tool Shed, so the cntrller param is required. + repository_ids = util.listify( kwd.get( 'repository_ids', None ) ) + CONTROLLER = kwd[ 'CONTROLLER' ] + message = '' + status = 'done' + if repository_ids: + successful_count = 0 + unsuccessful_count = 0 + for repository_id in repository_ids: + try: + if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER': + repository = get_repository_in_tool_shed( trans, repository_id ) + invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id ) + elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER': + repository = get_installed_tool_shed_repository( trans, repository_id ) + invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id ) + if invalid_file_tups: + message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False ) + log.debug( message ) + unsuccessful_count += 1 + else: + log.debug( "Successfully reset metadata on repository %s" % repository.name ) + successful_count += 1 + except Exception, e: + log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) ) + unsuccessful_count += 1 + message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) ) + if unsuccessful_count: + message += "Error setting metadata on %d %s - see the paster log for details. 
" % ( unsuccessful_count, + inflector.cond_plural( unsuccessful_count, "repository" ) ) + else: + message = 'Select at least one repository to on which to reset all metadata.' + status = 'error' + return message, status def reset_tool_data_tables( app ): # Reset the tool_data_tables to an empty dictionary. app.tool_data_tables.data_tables = {} @@ -2061,7 +2459,7 @@ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ) repository_tools_tups = get_repository_tools_tups( app, repository.metadata ) - cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) ) + cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) ) tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url ) owner = repository.owner if not owner: diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/admin.py --- a/lib/galaxy/webapps/community/controllers/admin.py +++ b/lib/galaxy/webapps/community/controllers/admin.py @@ -5,7 +5,9 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.web.form_builder import SelectField from galaxy.util import inflector -from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui +# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import build_repository_ids_select_field, get_changectx_for_changeset, get_configured_ui, get_repository_in_tool_shed +from galaxy.util.shed_util import reset_metadata_on_selected_repositories, TOOL_SHED_ADMIN_CONTROLLER from common import * from repository import RepositoryGrid, CategoryGrid @@ -481,7 +483,7 @@ # The received id is the repository id, so we need to get the id of the user # that uploaded the repository. 
repository_id = kwd.get( 'id', None ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) kwd[ 'f-email' ] = repository.user.email elif operation == "repositories_by_category": # Eliminate the current filters if any exist. @@ -513,7 +515,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if repository.tip != v: return trans.response.send_redirect( web.url_for( controller='repository', action='browse_repositories', @@ -586,7 +588,7 @@ count = 0 deleted_repositories = "" for repository_id in ids: - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if not repository.deleted: repository.deleted = True trans.sa_session.add( repository ) @@ -715,57 +717,14 @@ status=status ) @web.expose @web.require_admin - def reset_metadata_on_selected_repositories( self, trans, **kwd ): - params = util.Params( kwd ) - message = util.restore_text( params.get( 'message', '' ) ) - status = params.get( 'status', 'done' ) - repository_names_by_owner = util.listify( kwd.get( 'repository_names_by_owner', None ) ) + def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ): if 'reset_metadata_on_selected_repositories_button' in kwd: - if repository_names_by_owner: - successful_count = 0 - unsuccessful_count = 0 - for repository_name_owner_str in repository_names_by_owner: - repository_name_owner_list = repository_name_owner_str.split( STRSEP ) - name = repository_name_owner_list[ 0 ] - owner = repository_name_owner_list[ 1 ] - repository = get_repository_by_name_and_owner( trans, name, owner ) - try: - invalid_file_tups, metadata_dict = reset_all_metadata_on_repository( trans, 
trans.security.encode_id( repository.id ) ) - if invalid_file_tups: - message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False ) - log.debug( message ) - unsuccessful_count += 1 - else: - log.debug( "Successfully reset metadata on repository %s" % repository.name ) - successful_count += 1 - except Exception, e: - log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) ) - unsuccessful_count += 1 - message = "Successfully reset metadata on %d %s. " % ( successful_count, - inflector.cond_plural( successful_count, "repository" ) ) - if unsuccessful_count: - message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count, - inflector.cond_plural( unsuccessful_count, - "repository" ) ) - trans.response.send_redirect( web.url_for( controller='admin', - action='browse_repository_metadata', - message=util.sanitize_text( message ), - status=status ) ) - else: - 'Select at least one repository to on which to reset all metadata.' 
- status = 'error' - repositories_select_field = SelectField( name='repository_names_by_owner', - multiple=True, - display='checkboxes' ) - for repository in trans.sa_session.query( trans.model.Repository ) \ - .filter( and_( trans.model.Repository.table.c.deleted == False, - trans.model.Repository.table.c.deprecated == False ) ) \ - .order_by( trans.model.Repository.table.c.name, - trans.model.Repository.table.c.user_id ): - owner = repository.user.username - option_label = '%s (%s)' % ( repository.name, owner ) - option_value = '%s%s%s' % ( repository.name, STRSEP, owner ) - repositories_select_field.add_option( option_label, option_value ) + kwd[ 'CONTROLLER' ] = TOOL_SHED_ADMIN_CONTROLLER + message, status = reset_metadata_on_selected_repositories( trans, **kwd ) + else: + message = util.restore_text( kwd.get( 'message', '' ) ) + status = kwd.get( 'status', 'done' ) + repositories_select_field = build_repository_ids_select_field( trans, TOOL_SHED_ADMIN_CONTROLLER ) return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako', repositories_select_field=repositories_select_field, message=message, @@ -783,7 +742,7 @@ count = 0 undeleted_repositories = "" for repository_id in ids: - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if repository.deleted: repository.deleted = False trans.sa_session.add( repository ) diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -5,11 +5,14 @@ from galaxy.tools import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.hash_util import * -from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, generate_metadata_for_changeset_revision 
+# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, create_or_update_repository_metadata +from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools, generate_metadata_for_changeset_revision from galaxy.util.shed_util import get_changectx_for_changeset, get_config_from_disk, get_configured_ui, get_file_context_from_ctx, get_named_tmpfile_from_ctx -from galaxy.util.shed_util import get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk -from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_tmp_config, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH -from galaxy.util.shed_util import load_tool_from_config, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path +from galaxy.util.shed_util import get_parent_id, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision +from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config +from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH, is_downloadable, load_tool_from_config, remove_dir +from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path from galaxy.web.base.controller import * from galaxy.web.base.controllers.admin import * from galaxy.webapps.community import model @@ -171,105 +174,6 @@ if user_email in admin_users: return True return False -def clean_repository_metadata( trans, id, changeset_revisions ): - # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. - # We sometimes see multiple records with the same changeset revision value - no idea how this happens. 
We'll assume we can delete the older - # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later.. - changeset_revisions_checked = [] - for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ - .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision, - trans.model.RepositoryMetadata.table.c.update_time.desc() ): - changeset_revision = repository_metadata.changeset_revision - can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions - if can_delete: - trans.sa_session.delete( repository_metadata ) - trans.sa_session.flush() -def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): - # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of - # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only - # when this method returns the string 'not equal and not subset'. 
- ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) - ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) - ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ] - ancestor_guids.sort() - ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] ) - ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] ) - current_datatypes = current_metadata_dict.get( 'datatypes', [] ) - current_tools = current_metadata_dict.get( 'tools', [] ) - current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ] - current_guids.sort() - current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] ) - current_workflows = current_metadata_dict.get( 'workflows', [] ) - # Handle case where no metadata exists for either changeset. - if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: - return 'no metadata' - workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) - datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) - # Handle case where all metadata is the same. - if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': - return 'equal' - if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]: - is_subset = True - for guid in ancestor_guids: - if guid not in current_guids: - is_subset = False - break - if is_subset: - return 'subset' - return 'not equal and not subset' -def compare_datatypes( ancestor_datatypes, current_datatypes ): - # Determine if ancestor_datatypes is the same as current_datatypes - # or if ancestor_datatypes is a subset of current_datatypes. 
Each - # datatype dict looks something like: - # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} - if len( ancestor_datatypes ) <= len( current_datatypes ): - for ancestor_datatype in ancestor_datatypes: - # Currently the only way to differentiate datatypes is by name. - ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] - ancestor_datatype_extension = ancestor_datatype[ 'extension' ] - ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) - found_in_current = False - for current_datatype in current_datatypes: - if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ - current_datatype[ 'extension' ] == ancestor_datatype_extension and \ - current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_datatypes ) == len( current_datatypes ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' -def compare_workflows( ancestor_workflows, current_workflows ): - # Determine if ancestor_workflows is the same as current_workflows - # or if ancestor_workflows is a subset of current_workflows. - if len( ancestor_workflows ) <= len( current_workflows ): - for ancestor_workflow_tup in ancestor_workflows: - # ancestor_workflows is a list of tuples where each contained tuple is - # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] - ancestor_workflow_dict = ancestor_workflow_tup[1] - # Currently the only way to differentiate workflows is by name. - ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] - num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) - found_in_current = False - for current_workflow_tup in current_workflows: - current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are euqal, - # then the workflows are the same. 
Of course, this may - # not be true... - if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_workflows ) == len( current_workflows ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' def copy_file_from_disk( filename, repo_dir, dir ): file_path = None found = False @@ -300,66 +204,6 @@ fh.close() return file_path return None -def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): - downloadable = is_downloadable( metadata_dict ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - repository_metadata.metadata = metadata_dict - repository_metadata.downloadable = downloadable - else: - repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id, - changeset_revision=changeset_revision, - metadata=metadata_dict, - downloadable=downloadable ) - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - return repository_metadata -def generate_clone_url( trans, repository_id ): - """Generate the URL for cloning a repository.""" - repository = get_repository( trans, repository_id ) - base_url = url_for( '/', qualified=True ).rstrip( '/' ) - if trans.user: - protocol, base = base_url.split( '://' ) - username = '%s@' % trans.user.username - return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) - else: - return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) -def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ): - if as_html: - new_line = '<br/>' - bold_start = '<b>' - bold_end = '</b>' - else: - new_line = '\n' - bold_start = '' - 
bold_end = '' - message = '' - if not displaying_invalid_tool: - if metadata_dict: - message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip ) - message += "Correct the following problems if necessary and reset metadata.%s" % new_line - else: - message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip ) - message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line - for itc_tup in invalid_file_tups: - tool_file, exception_msg = itc_tup - if exception_msg.find( 'No such file or directory' ) >= 0: - exception_items = exception_msg.split() - missing_file_items = exception_items[ 7 ].split( '/' ) - missing_file = missing_file_items[ -1 ].rstrip( '\'' ) - if missing_file.endswith( '.loc' ): - sample_ext = '%s.sample' % missing_file - else: - sample_ext = missing_file - correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end ) - correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end ) - else: - if as_html: - correction_msg = exception_msg - else: - correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end ) - message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line ) - return message def generate_tool_guid( trans, repository, tool ): """ Generate a guid for the received tool. The form of the guid is @@ -450,24 +294,6 @@ fh.close() return tmp_filename return None -def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ): - parent_id = None - # Compare from most recent to oldest. 
- changeset_revisions.reverse() - for changeset_revision in changeset_revisions: - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - metadata = repository_metadata.metadata - tools_dicts = metadata.get( 'tools', [] ) - for tool_dict in tools_dicts: - if tool_dict[ 'guid' ] == guid: - # The tool has not changed between the compared changeset revisions. - continue - if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version: - # The tool version is different, so we've found the parent. - return tool_dict[ 'guid' ] - if parent_id is None: - # The tool did not change through all of the changeset revisions. - return old_id def get_previous_downloadable_changset_revision( repository, repo, before_changeset_revision ): """ Return the downloadable changeset_revision in the repository changelog just prior to the changeset to which before_changeset_revision @@ -542,9 +368,6 @@ for changeset in repo.changelog: reversed_changelog.insert( 0, changeset ) return reversed_changelog -def get_repository( trans, id ): - """Get a repository from the database via id""" - return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) ) def get_repository_by_name( trans, name ): """Get a repository from the database via name""" return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() @@ -707,8 +530,6 @@ if previous_changeset_revision in reviewed_revision_hashes: return True return False -def is_downloadable( metadata_dict ): - return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ): """ Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision @@ -716,7 +537,7 @@ revision and the first changeset revision in the repository, searching backwards. 
""" original_tool_data_path = trans.app.config.tool_data_path - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repo_files_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_files_dir ) message = '' @@ -802,134 +623,6 @@ return True # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. return False -def remove_dir( dir ): - if os.path.exists( dir ): - try: - shutil.rmtree( dir ) - except: - pass -def reset_all_metadata_on_repository( trans, id, **kwd ): - def reset_all_tool_versions( trans, id, repo ): - changeset_revisions = [] - for changeset in repo.changelog: - changeset_revision = str( repo.changectx( changeset ) ) - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if metadata.get( 'tools', None ): - changeset_revisions.append( changeset_revision ) - # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools. - # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. - for index, changeset_revision in enumerate( changeset_revisions ): - tool_versions_dict = {} - repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - metadata = repository_metadata.metadata - tool_dicts = metadata[ 'tools' ] - if index == 0: - # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools. - # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config. 
- for tool_dict in tool_dicts: - tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ] - else: - for tool_dict in tool_dicts: - parent_id = get_parent_id( trans, - id, - tool_dict[ 'id' ], - tool_dict[ 'version' ], - tool_dict[ 'guid' ], - changeset_revisions[ 0:index ] ) - tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id - if tool_versions_dict: - repository_metadata.tool_versions = tool_versions_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - params = util.Params( kwd ) - message = util.restore_text( params.get( 'message', '' ) ) - status = params.get( 'status', 'done' ) - repository = get_repository( trans, id ) - log.debug( "Resetting all metadata on repository: %s" % repository.name ) - repo_dir = repository.repo_path - repo = hg.repository( get_configured_ui(), repo_dir ) - repository_clone_url = generate_clone_url( trans, id ) - # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop - # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list. - changeset_revisions = [] - # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict. 
- metadata_changeset_revision = None - metadata_dict = None - ancestor_changeset_revision = None - ancestor_metadata_dict = None - invalid_file_tups = [] - home_dir = os.getcwd() - for changeset in repo.changelog: - work_dir = tempfile.mkdtemp() - current_changeset_revision = str( repo.changectx( changeset ) ) - ctx = repo.changectx( changeset ) - log.debug( "Cloning repository revision: %s", str( ctx.rev() ) ) - cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) ) - if cloned_ok: - log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) ) - current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - relative_install_dir=repo_dir, - repository_files_dir=work_dir, - resetting_all_metadata_on_repository=True, - updating_installed_repository=False ) - if current_metadata_dict: - if not metadata_changeset_revision and not metadata_dict: - # We're at the first change set in the change log. - metadata_changeset_revision = current_changeset_revision - metadata_dict = current_metadata_dict - if ancestor_changeset_revision: - # Compare metadata from ancestor and current. The value of comparison will be one of: - # 'no metadata' - no metadata for either ancestor or current, so continue from current - # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current - # 'subset' - ancestor metadata is a subset of current metadata, so continue from current - # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata. 
- comparison = compare_changeset_revisions( ancestor_changeset_revision, - ancestor_metadata_dict, - current_changeset_revision, - current_metadata_dict ) - if comparison in [ 'no metadata', 'equal', 'subset' ]: - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - elif comparison == 'not equal and not subset': - metadata_changeset_revision = ancestor_changeset_revision - metadata_dict = ancestor_metadata_dict - repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) - changeset_revisions.append( metadata_changeset_revision ) - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - else: - # We're at the beginning of the change log. - ancestor_changeset_revision = current_changeset_revision - ancestor_metadata_dict = current_metadata_dict - if not ctx.children(): - metadata_changeset_revision = current_changeset_revision - metadata_dict = current_metadata_dict - # We're at the end of the change log. - repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) - changeset_revisions.append( metadata_changeset_revision ) - ancestor_changeset_revision = None - ancestor_metadata_dict = None - elif ancestor_metadata_dict: - # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not. - if not ctx.children(): - # We're at the end of the change log. - repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) - changeset_revisions.append( metadata_changeset_revision ) - ancestor_changeset_revision = None - ancestor_metadata_dict = None - remove_dir( work_dir ) - # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions. 
- clean_repository_metadata( trans, id, changeset_revisions ) - # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog. - reset_all_tool_versions( trans, id, repo ) - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) - return invalid_file_tups, metadata_dict def set_repository_metadata( trans, repository, content_alert_str='', **kwd ): """ Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset @@ -938,7 +631,7 @@ message = '' status = 'done' encoded_id = trans.security.encode_id( repository.id ) - repository_clone_url = generate_clone_url( trans, encoded_id ) + repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -9,11 +9,14 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.util.json import from_json_string, to_json_string from galaxy.model.orm import * -from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision -from galaxy.util.shed_util import get_repository_file_contents, get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk +# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import create_repo_info_dict, generate_clone_url_for_repository_in_tool_shed, 
generate_message_for_invalid_tools +from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision, get_repository_file_contents +from galaxy.util.shed_util import get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_tmp_config, INITIAL_CHANGELOG_HASH, load_tool_from_config, NOT_TOOL_CONFIGS -from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path -from galaxy.util.shed_util import to_html_escaped, update_repository, url_join +from galaxy.util.shed_util import open_repository_files_folder, remove_dir, reset_all_metadata_on_repository_in_tool_shed +from galaxy.util.shed_util import reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path, to_html_escaped +from galaxy.util.shed_util import update_repository, url_join from galaxy.tool_shed.encoding_util import * from common import * @@ -639,7 +642,7 @@ else: # The received id is the repository id, so we need to get the id of the user that uploaded the repository. repository_id = kwd.get( 'id', None ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) kwd[ 'f-email' ] = repository.user.email elif operation == "repositories_i_own": # Eliminate the current filters if any exist. 
@@ -696,7 +699,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if repository.tip != v: return trans.response.send_redirect( web.url_for( controller='repository', action='browse_repositories', @@ -710,7 +713,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo = hg.repository( get_configured_ui(), repository.repo_path ) # Update repository files for browsing. update_repository( repo ) @@ -771,7 +774,7 @@ operation = kwd[ 'operation' ].lower() if operation == "preview_tools_in_changeset": repository_id = kwd.get( 'id', None ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repository_metadata = get_latest_repository_metadata( trans, repository.id ) latest_installable_changeset_revision = repository_metadata.changeset_revision return trans.response.send_redirect( web.url_for( controller='repository', @@ -795,7 +798,7 @@ changset_revision_str = 'changeset_revision_' if k.startswith( changset_revision_str ): repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if repository.tip != v: return trans.response.send_redirect( web.url_for( controller='repository', action='preview_tools_in_changeset', @@ -922,7 +925,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = 
get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) metadata = self.get_metadata( trans, id, repository.tip ) if trans.user and trans.user.email: return trans.fill_template( "/webapps/community/repository/contact_owner.mako", @@ -1033,7 +1036,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) repository_id = params.get( 'id', None ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) ) repository.deprecated = mark_deprecated trans.sa_session.add( repository ) @@ -1086,7 +1089,7 @@ def download( self, trans, repository_id, changeset_revision, file_type, **kwd ): # Download an archive of the repository files compressed as zip, gz or bz2. params = util.Params( kwd ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) # Allow hgweb to handle the download. This requires the tool shed # server account's .hgrc file to include the following setting: # [web] @@ -1119,7 +1122,7 @@ # The received id is a RepositoryMetadata id, so we have to get the repository id. repository_metadata = get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ): @@ -1204,7 +1207,7 @@ # The received id is a RepositoryMetadata id, so we have to get the repository id. 
repository_metadata = get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ): @@ -1309,8 +1312,8 @@ repo_info_dicts = [] for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ): repository_id, changeset_revision = tup - repository_clone_url = generate_clone_url( trans, repository_id ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) + repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) metadata = repository_metadata.metadata if not includes_tools and 'tools' in metadata: @@ -1668,7 +1671,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) cntrller = params.get( 'cntrller', 'repository' ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) repo_name = util.restore_text( params.get( 'repo_name', repository.name ) ) @@ -1892,7 +1895,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, 
changeset_revision ) if repository_metadata: @@ -1957,7 +1960,7 @@ action='browse_repositories', message='Select a repository to rate', status='error' ) ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo = hg.repository( get_configured_ui(), repository.repo_path ) if repository.user == trans.user: return trans.response.send_redirect( web.url_for( controller='repository', @@ -1985,9 +1988,11 @@ status=status ) @web.expose def reset_all_metadata( self, trans, id, **kwd ): - invalid_file_tups, metadata_dict = reset_all_metadata_on_repository( trans, id, **kwd ) + # This method is called only from the ~/templates/webapps/community/repository/manage_repository.mako template. + # It resets all metadata on the complete changelog for a single repository in the tool shed. + invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd ) if invalid_file_tups: - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) message = generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict ) status = 'error' else: @@ -2093,7 +2098,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) ) @@ -2155,7 +2160,7 @@ status=status ) @web.expose def send_to_owner( self, trans, id, message='' ): - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) if not message: message = 'Enter a message' status = 'error' @@ -2205,7 +2210,7 @@ total_alerts_removed = 0 flush_needed = False for 
repository_id in repository_ids: - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) if repository.email_alerts: email_alerts = from_json_string( repository.email_alerts ) else: @@ -2277,7 +2282,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo = hg.repository( get_configured_ui(), repository.repo_path ) changesets = [] for changeset in repo.changelog: @@ -2314,7 +2319,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo = hg.repository( get_configured_ui(), repository.repo_path ) ctx = get_changectx_for_changeset( repo, ctx_str ) if ctx is None: @@ -2351,7 +2356,7 @@ status=status ) @web.expose def view_or_manage_repository( self, trans, **kwd ): - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) if trans.user_is_admin() or repository.user == trans.user: return trans.response.send_redirect( web.url_for( controller='repository', action='manage_repository', @@ -2366,7 +2371,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) cntrller = params.get( 'cntrller', 'repository' ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata @@ -2417,7 +2422,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) cntrller = params.get( 
'cntrller', 'repository' ) - repository = get_repository( trans, id ) + repository = get_repository_in_tool_shed( trans, id ) repo = hg.repository( get_configured_ui(), repository.repo_path ) avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) ) @@ -2499,7 +2504,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repo_files_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_files_dir ) tool_metadata_dict = {} diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/repository_review.py --- a/lib/galaxy/webapps/community/controllers/repository_review.py +++ b/lib/galaxy/webapps/community/controllers/repository_review.py @@ -8,7 +8,8 @@ from sqlalchemy.sql.expression import func from common import * from repository import RepositoryGrid -from galaxy.util.shed_util import get_configured_ui +# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed from galaxy.util.odict import odict from galaxy import eggs @@ -381,7 +382,7 @@ message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name ) status = "error" else: - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) # See if there are any reviews for previous changeset revisions that the user can copy. 
if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ): return trans.response.send_redirect( web.url_for( controller='repository_review', @@ -650,7 +651,7 @@ status = params.get( 'status', 'done' ) repository_id = kwd.get( 'id', None ) if repository_id: - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] @@ -697,7 +698,7 @@ status = params.get( 'status', 'done' ) repository_id = kwd.get( 'id', None ) changeset_revision = kwd.get( 'changeset_revision', None ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] @@ -762,7 +763,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) changeset_revision = kwd.get( 'changeset_revision', None ) repo = hg.repository( get_configured_ui(), repository.repo_path ) previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision ) @@ -777,7 +778,7 @@ @web.expose @web.require_login( "view or manage repository" ) def view_or_manage_repository( self, trans, **kwd ): - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] ) if trans.user_is_admin() or repository.user == trans.user: return trans.response.send_redirect( 
web.url_for( controller='repository', action='manage_repository', diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/upload.py --- a/lib/galaxy/webapps/community/controllers/upload.py +++ b/lib/galaxy/webapps/community/controllers/upload.py @@ -3,7 +3,9 @@ from galaxy.model.orm import * from galaxy.datatypes.checkers import * from common import * -from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file, update_repository +# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed, reset_tool_data_tables, handle_sample_tool_data_table_conf_file +from galaxy.util.shed_util import update_repository from galaxy import eggs eggs.require('mercurial') @@ -28,7 +30,7 @@ category_ids = util.listify( params.get( 'category_id', '' ) ) categories = get_categories( trans ) repository_id = params.get( 'repository_id', '' ) - repository = get_repository( trans, repository_id ) + repository = get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) ) diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/workflow.py --- a/lib/galaxy/webapps/community/controllers/workflow.py +++ b/lib/galaxy/webapps/community/controllers/workflow.py @@ -10,6 +10,8 @@ from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps from galaxy.model.orm import * from common import * +# TODO: re-factor shed_util to eliminate the following restricted imports +from galaxy.util.shed_util import get_repository_in_tool_shed from galaxy.tool_shed.encoding_util import * class RepoInputDataModule( InputDataModule ): @@ -144,7 
+146,7 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id ) - repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) ) + repository = get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) ) return trans.fill_template( "/webapps/community/repository/view_workflow.mako", repository=repository, changeset_revision=repository_metadata.changeset_revision, diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -10,8 +10,6 @@ log = logging.getLogger( __name__ ) -MAX_CONTENT_SIZE = 32768 - class InstalledRepositoryGrid( grids.Grid ): class NameColumn( grids.TextColumn ): def get_value( self, trans, grid, tool_shed_repository ): @@ -300,7 +298,7 @@ return query class AdminToolshed( AdminGalaxy ): - + installed_repository_grid = InstalledRepositoryGrid() repository_installation_grid = RepositoryInstallationGrid() tool_dependency_grid = ToolDependencyGrid() @@ -309,9 +307,9 @@ @web.require_admin def activate_repository( self, trans, **kwd ): """Activate a repository that was deactivated but not uninstalled.""" - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository ) - repository_clone_url = self.__generate_clone_url( trans, repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) repository.deleted = False repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED if repository.includes_tools: @@ -356,7 +354,7 @@ params 
= util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako', repository=repository, message=message, @@ -380,7 +378,7 @@ action='reset_to_install', **kwd ) ) if operation == "activate or reinstall": - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) if repository.uninstalled: if repository.includes_tools: # Only allow selecting a different section in the tool panel if the repository was uninstalled. @@ -440,7 +438,7 @@ @web.require_admin def check_for_updates( self, trans, **kwd ): # Send a request to the relevant tool shed to see if there are any updates. - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) url = url_join( tool_shed_url, 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \ @@ -454,7 +452,7 @@ status = params.get( 'status', 'done' ) remove_from_disk = params.get( 'remove_from_disk', '' ) remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk ) - tool_shed_repository = get_repository( trans, kwd[ 'id' ] ) + tool_shed_repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) if relative_install_dir: if tool_path: @@ -819,7 +817,7 @@ status = params.get( 'status', 'done' ) repository_id = kwd[ 'id' ] operation = kwd.get( 'operation', None ) - repository = get_repository( trans, repository_id ) + repository = get_installed_tool_shed_repository( trans, repository_id ) if not repository: 
return trans.show_error_message( 'Invalid repository specified.' ) if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]: @@ -1237,14 +1235,14 @@ message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) repository_id = kwd[ 'id' ] - tool_shed_repository = get_repository( trans, repository_id ) + tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id ) no_changes = kwd.get( 'no_changes', '' ) no_changes_checked = CheckboxField.is_checked( no_changes ) install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' ) tool_panel_section = kwd.get( 'tool_panel_section', '' ) shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) - repository_clone_url = generate_clone_url( trans, tool_shed_repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository ) clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name ) tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) @@ -1395,11 +1393,11 @@ @web.expose @web.require_admin def reselect_tool_panel_section( self, trans, **kwd ): - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) metadata = repository.metadata tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision ) - repository_clone_url = generate_clone_url( trans, repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) repo_info_dict = 
create_repo_info_dict( repository, repository.owner, repository_clone_url, @@ -1460,63 +1458,25 @@ status=status ) @web.expose @web.require_admin - def reset_metadata_on_selected_repositories( self, trans, **kwd ): - # TODO: merge this with the similar method in the repository controller. - params = util.Params( kwd ) - message = util.restore_text( params.get( 'message', '' ) ) - status = params.get( 'status', 'done' ) - repository_ids = util.listify( kwd.get( 'repository_names_by_owner', None ) ) + def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ): if 'reset_metadata_on_selected_repositories_button' in kwd: - if repository_ids: - successful_count = 0 - unsuccessful_count = 0 - for repository_id in repository_ids: - repository = get_repository( trans, repository_id ) - try: - invalid_file_tups, metadata_dict = self.reset_repository_metadata( trans, - trans.security.encode_id( repository.id ), - resetting_all_repositories=True ) - if invalid_file_tups: - unsuccessful_count += 1 - else: - successful_count += 1 - except Exception, e: - log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) ) - unsuccessful_count += 1 - message = "Successfully reset metadata on %d %s. " % ( successful_count, - inflector.cond_plural( successful_count, "repository" ) ) - if unsuccessful_count: - message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count, - inflector.cond_plural( unsuccessful_count, - "repository" ) ) - trans.response.send_redirect( web.url_for( controller='admin_toolshed', - action='browse_repositories', - message=util.sanitize_text( message ), - status=status ) ) - else: - 'Select at least one repository to on which to reset all metadata.' 
- status = 'error' - repositories_select_field = SelectField( name='repository_names_by_owner', - multiple=True, - display='checkboxes' ) - for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \ - .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \ - .order_by( trans.model.ToolShedRepository.table.c.name, - trans.model.ToolShedRepository.table.c.owner ): - option_label = '%s (%s)' % ( repository.name, repository.owner ) - option_value = trans.security.encode_id( repository.id ) - repositories_select_field.add_option( option_label, option_value ) + kwd[ 'CONTROLLER' ] = GALAXY_ADMIN_TOOL_SHED_CONTROLLER + message, status = reset_metadata_on_selected_repositories( trans, **kwd ) + else: + message = util.restore_text( kwd.get( 'message', '' ) ) + status = kwd.get( 'status', 'done' ) + repositories_select_field = build_repository_ids_select_field( trans, GALAXY_ADMIN_TOOL_SHED_CONTROLLER ) return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako', repositories_select_field=repositories_select_field, message=message, status=status ) @web.expose @web.require_admin - def reset_repository_metadata( self, trans, id, resetting_all_repositories=False ): - """Reset all metadata on the installed tool shed repository.""" - repository = get_repository( trans, id ) + def reset_repository_metadata( self, trans, id ): + """Reset all metadata on a single installed tool shed repository.""" + repository = get_installed_tool_shed_repository( trans, id ) tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - repository_clone_url = self.__generate_clone_url( trans, repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) if relative_install_dir: original_metadata_dict = repository.metadata @@ -1533,35 +1493,24 @@ update_in_shed_tool_config( trans.app, 
repository ) trans.sa_session.add( repository ) trans.sa_session.flush() - if resetting_all_repositories: - log.debug( 'Metadata has been reset on repository %s.' % repository.name ) - else: - message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name - status = 'done' + message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name + status = 'done' else: - if resetting_all_repositories: - log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name ) - else: - message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name - status = 'done' + message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name + status = 'done' else: - if resetting_all_repositories: - log.debug( 'Error locating installation directory for repository %s.' % repository.name ) - else: - message = 'Error locating installation directory for repository <b>%s</b>.' % repository.name - status = 'error' - if resetting_all_repositories: - return invalid_file_tups, metadata_dict - else: - new_kwd = dict( id=id, message=message, status=status ) - return trans.response.send_redirect( web.url_for( controller='admin_toolshed', - action='manage_repository', - **new_kwd ) ) + message = 'Error locating installation directory for repository <b>%s</b>.' 
% repository.name + status = 'error' + return trans.response.send_redirect( web.url_for( controller='admin_toolshed', + action='manage_repository', + id=id, + message=message, + status=status ) ) @web.expose @web.require_admin def reset_to_install( self, trans, **kwd ): """An error occurred while cloning the repository, so reset everything necessary to enable another attempt.""" - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) if kwd.get( 'reset_repository', False ): self.set_repository_attributes( trans, repository, @@ -1596,7 +1545,7 @@ @web.require_admin def set_tool_versions( self, trans, **kwd ): # Get the tool_versions from the tool shed for each tool in the installed change set. - repository = get_repository( trans, kwd[ 'id' ] ) + repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] ) tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) url = url_join( tool_shed_url, 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ @@ -1756,7 +1705,7 @@ message = util.restore_text( params.get( 'message', '' ) ) cntrller = params.get( 'cntrller', 'admin_toolshed' ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, id ) + repository = get_installed_tool_shed_repository( trans, id ) metadata = repository.metadata shed_config_dict = repository.get_shed_config_dict( trans.app ) tool_path = shed_config_dict.get( 'tool_path', None ) @@ -1790,7 +1739,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - repository = get_repository( trans, repository_id ) + repository = get_installed_tool_shed_repository( trans, repository_id ) repository_metadata = repository.metadata shed_config_dict = repository.get_shed_config_dict( trans.app ) tool_metadata = {} @@ -1815,10 +1764,6 @@ tool_lineage=tool_lineage, message=message, status=status ) - def 
__generate_clone_url( self, trans, repository ): - """Generate the URL for cloning a repository.""" - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - return url_join( tool_shed_url, 'repos', repository.owner, repository.name ) ## ---- Utility methods ------------------------------------------------------- @@ -1847,9 +1792,6 @@ for option_tup in options: select_field.add_option( option_tup[0], option_tup[1] ) return select_field -def get_repository( trans, id ): - """Get a tool_shed_repository from the database via id""" - return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) ) def get_tool_dependency( trans, id ): """Get a tool_dependency from the database via id""" return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) ) diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako --- a/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako +++ b/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako @@ -1,42 +1,10 @@ <%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" /> - -<%def name="local_javascripts()"> - <script type="text/javascript"> - function checkAllFields() - { - var chkAll = document.getElementById('checkAll'); - var checks = document.getElementsByTagName('input'); - var boxLength = checks.length; - var allChecked = false; - var totalChecked = 0; - if ( chkAll.checked == true ) - { - for ( i=0; i < boxLength; i++ ) - { - if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1) - { - checks[i].checked = true; - } - } - } - else - { - for ( i=0; i < boxLength; i++ ) - { - if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1) - { - checks[i].checked = false - } - } - } - } - </script> -</%def> +<%namespace 
file="/webapps/community/common/common.mako" import="common_misc_javascripts" /><%def name="javascripts()"> ${parent.javascripts()} - ${local_javascripts()} + ${common_misc_javascripts()} </%def> %if message: @@ -50,13 +18,13 @@ <div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected tool shed repository</div> - <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_repositories' )}" method="post" > + <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" method="post" ><div class="form-row"> Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. </div><div style="clear: both"></div><div class="form-row"> - <input type="checkbox" id="checkAll" name=select_all_repositories_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_repositories_checkbox value="true"/><b>Select/unselect all repositories</b> + <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllFields('repository_ids');"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b></div><div style="clear: both"></div><div class="form-row"> diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/admin/index.mako --- a/templates/webapps/community/admin/index.mako +++ b/templates/webapps/community/admin/index.mako @@ -55,7 +55,7 @@ <a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repositories' )}">Browse all repositories</a></div><div class="toolTitle"> - <a target="galaxy_main" href="${h.url_for( 
controller='admin', action='reset_metadata_on_selected_repositories' )}">Reset selected metadata</a> + <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}">Reset selected metadata</a></div><div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repository_metadata' )}">Browse metadata</a> diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako --- a/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako +++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako @@ -1,42 +1,10 @@ <%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" /> - -<%def name="local_javascripts()"> - <script type="text/javascript"> - function checkAllFields() - { - var chkAll = document.getElementById('checkAll'); - var checks = document.getElementsByTagName('input'); - var boxLength = checks.length; - var allChecked = false; - var totalChecked = 0; - if ( chkAll.checked == true ) - { - for ( i=0; i < boxLength; i++ ) - { - if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1) - { - checks[i].checked = true; - } - } - } - else - { - for ( i=0; i < boxLength; i++ ) - { - if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1) - { - checks[i].checked = false - } - } - } - } - </script> -</%def> +<%namespace file="/webapps/community/common/common.mako" import="common_misc_javascripts" /><%def name="javascripts()"> ${parent.javascripts()} - ${local_javascripts()} + ${common_misc_javascripts()} </%def> %if message: @@ -51,13 +19,13 @@ <div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div> - <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( 
controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" > + <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}" method="post" ><div class="form-row"> Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. </div><div style="clear: both"></div><div class="form-row"> - <input type="checkbox" id="checkAll" name=select_all_repositories_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_repositories_checkbox value="true"/><b>Select/unselect all repositories</b> + <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllFields('repository_ids');"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b></div><div style="clear: both"></div><div class="form-row"> diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/common/common.mako --- a/templates/webapps/community/common/common.mako +++ b/templates/webapps/community/common/common.mako @@ -1,3 +1,36 @@ +<%def name="common_misc_javascripts()"> + <script type="text/javascript"> + function checkAllFields( name ) + { + var chkAll = document.getElementById( 'checkAll' ); + var checks = document.getElementsByTagName( 'input' ); + var boxLength = checks.length; + var allChecked = false; + var totalChecked = 0; + if ( chkAll.checked == true ) + { + for ( i=0; i < boxLength; i++ ) + { + if ( checks[i].name.indexOf( name ) != -1 ) + { + checks[i].checked = true; + } + } + } + else + { + for ( i=0; i < boxLength; i++ ) + { + if ( checks[i].name.indexOf( name ) != -1 ) + { + checks[i].checked = false + } + } + } + } + </script> +</%def> + <%def name="escape_html_add_breaks( value 
)"><% from galaxy import eggs diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/repository/common.mako --- a/templates/webapps/community/repository/common.mako +++ b/templates/webapps/community/repository/common.mako @@ -77,8 +77,8 @@ <%def name="render_clone_str( repository )"><% - from galaxy.webapps.community.controllers.common import generate_clone_url - clone_str = generate_clone_url( trans, trans.security.encode_id( repository.id ) ) + from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed + clone_str = generate_clone_url_for_repository_in_tool_shed( trans, repository ) %> hg clone <a href="${clone_str}">${clone_str}</a></%def> diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/galaxy/admin/index.mako --- a/templates/webapps/galaxy/admin/index.mako +++ b/templates/webapps/galaxy/admin/index.mako @@ -75,7 +75,7 @@ <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='monitor_repository_installation', tool_shed_repository_ids=installing_repository_ids )}" target="galaxy_main">Monitor installing tool shed repositories</a></div> %endif %if installed_repositories: - <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_repositories' )}" target="galaxy_main">Reset metadata for tool shed repositories</a></div> + <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" target="galaxy_main">Reset metadata for tool shed repositories</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='browse_repositories' )}" target="galaxy_main">Manage installed tool shed repositories</a></div> %endif </div> Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. 
You are receiving this email because you have the commit notification service enabled for this repository.
participants (1)
-
Bitbucket