commit/galaxy-central: greg: 1) Add a new InstalledRepositoryManager class which currently enables loading datatypes from previously installed tool shed repositories into the datatypes registry. This component will enable additional features in the future.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/42b4bb82e006/ changeset: 42b4bb82e006 user: greg date: 2011-12-16 17:34:35 summary: 1) Add a new InstalledRepositoryManager class which currently enables loading datatypes from previously installed tool shed repositories into the datatypes registry. This component will enable additional features in the future. 2) Enhance the install_repository method in the admin_toolshed controller to skip displaying the page for selecting a tool panel section if no tools exist in any of the repositories being installed. 3) Eliminate duplicate generate_datatypes_metadata, generate_tool_metadata, and generate_workflow_metadata methods and use those now contained in shed_util.py. 4) Fixes for handling tool shed repository metadata for repositories that do not include any tools. affected #: 10 files diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/app.py --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -22,7 +22,7 @@ self.config = config.Configuration( **kwargs ) self.config.check() config.configure_logging( self.config ) - # Set up datatypes registry + # Initialize the datatypes registry to the default data types included in self.config.datatypes_config. self.datatypes_registry = galaxy.datatypes.registry.Registry() self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config ) galaxy.model.set_datatypes_registry( self.datatypes_registry ) @@ -68,6 +68,10 @@ if self.config.get_bool( 'enable_tool_shed_check', False ): from tool_shed import update_manager self.update_manager = update_manager.UpdateManager( self ) + # Manage installed tool shed repositories + self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self ) + # Add additional datatypes from installed tool shed repositories to the datatypes registry. 
+ self.installed_repository_manager.load_datatypes() # Load datatype converters self.datatypes_registry.load_datatype_converters( self.toolbox ) # Load history import/export tools diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/tool_shed/__init__.py --- a/lib/galaxy/tool_shed/__init__.py +++ b/lib/galaxy/tool_shed/__init__.py @@ -1,3 +1,22 @@ """ -Classes encapsulating the relationships between Galaxy and Galaxy tool sheds. -""" \ No newline at end of file +Classes encapsulating the management of repositories installed from Galaxy tool sheds. +""" +import os, logging +from galaxy.model.orm import * + +log = logging.getLogger(__name__) + +class InstalledRepositoryManager( object ): + def __init__( self, app ): + self.app = app + self.model = self.app.model + self.sa_session = self.model.context.current + def load_datatypes( self ): + for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \ + .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True, + self.model.ToolShedRepository.table.c.deleted==False ) ): + metadata = tool_shed_repository.metadata + datatypes_config = metadata[ 'datatypes_config' ] + full_path = os.path.abspath( datatypes_config ) + self.app.datatypes_registry.load_datatypes( self.app.config.root, full_path ) + \ No newline at end of file diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -158,4 +158,13 @@ if os.path.exists( clone_dir ): installed = True break + if not installed: + full_path = os.path.abspath( clone_dir ) + # We may have a repository that contains no tools. + if os.path.exists( full_path ): + for root, dirs, files in os.walk( full_path ): + if '.hg' in dirs: + # Assume that the repository has been installed if we find a .hg directory. 
+ installed = True + break return installed diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -109,6 +109,7 @@ Update the received metadata_dict with changes that have been applied to the received datatypes_config. This method is used by the InstallManager, which does not have access to trans. + TODO: Handle converters, indexers, sniffers, etc... """ # Parse datatypes_config. tree = ElementTree.parse( datatypes_config ) @@ -125,13 +126,29 @@ registration = root.find( 'registration' ) if registration: for elem in registration.findall( 'datatype' ): - extension = elem.get( 'extension', None ) + datatypes_dict = {} + display_in_upload = elem.get( 'display_in_upload', None ) + if display_in_upload: + datatypes_dict[ 'display_in_upload' ] = display_in_upload dtype = elem.get( 'type', None ) + if dtype: + datatypes_dict[ 'dtype' ] = dtype + extension = elem.get( 'extension', None ) + if extension: + datatypes_dict[ 'extension' ] = extension + max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None ) + if max_optional_metadata_filesize: + datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize mimetype = elem.get( 'mimetype', None ) - datatypes.append( dict( extension=extension, - dtype=dtype, - mimetype=mimetype ) ) - metadata_dict[ 'datatypes' ] = datatypes + if mimetype: + datatypes_dict[ 'mimetype' ] = mimetype + subclass = elem.get( 'subclass', None ) + if subclass: + datatypes_dict[ 'subclass' ] = subclass + if datatypes_dict: + datatypes.append( datatypes_dict ) + if datatypes: + metadata_dict[ 'datatypes' ] = datatypes return metadata_dict def generate_metadata( toolbox, relative_install_dir, repository_clone_url ): """ @@ -426,46 +443,52 @@ # This method is used by the InstallManager, which does not have access to trans. 
imported_module = None # Parse datatypes_config. - tree = parse_xml( datatypes_config ) + tree = util.parse_xml( datatypes_config ) datatypes_config_root = tree.getroot() relative_path_to_datatype_file_name = None datatype_files = datatypes_config_root.find( 'datatype_files' ) - # Currently only a single datatype_file is supported. For example: - # <datatype_files> - # <datatype_file name="gmap.py"/> - # </datatype_files> - for elem in datatype_files.findall( 'datatype_file' ): - datatype_file_name = elem.get( 'name', None ) - if datatype_file_name: - # Find the file in the installed repository. - for root, dirs, files in os.walk( relative_intall_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == datatype_file_name: - relative_path_to_datatype_file_name = os.path.join( root, name ) - break - break - if relative_path_to_datatype_file_name: - relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name ) - registration = datatypes_config_root.find( 'registration' ) - # Get the module by parsing the <datatype> tag. - for elem in registration.findall( 'datatype' ): - # A 'type' attribute is currently required. The attribute - # should be something like: type="gmap:GmapDB". - dtype = elem.get( 'type', None ) - if dtype: - fields = dtype.split( ':' ) - datatype_module = fields[0] - datatype_class_name = fields[1] - # Since we currently support only a single datatype_file, - # we have what we need. + if datatype_files: + # Currently only a single datatype_file is supported. For example: + # <datatype_files> + # <datatype_file name="gmap.py"/> + # </datatype_files> + for elem in datatype_files.findall( 'datatype_file' ): + datatype_file_name = elem.get( 'name', None ) + if datatype_file_name: + # Find the file in the installed repository. 
+ for root, dirs, files in os.walk( relative_intall_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name == datatype_file_name: + relative_path_to_datatype_file_name = os.path.join( root, name ) + break break - try: - sys.path.insert( 0, relative_head ) - imported_module = __import__( datatype_module ) - sys.path.pop( 0 ) - except Exception, e: - log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) ) + if relative_path_to_datatype_file_name: + relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name ) + registration = datatypes_config_root.find( 'registration' ) + # Get the module by parsing the <datatype> tag. + for elem in registration.findall( 'datatype' ): + # A 'type' attribute is currently required. The attribute + # should be something like: type="gmap:GmapDB". + dtype = elem.get( 'type', None ) + if dtype: + fields = dtype.split( ':' ) + datatype_module = fields[0] + datatype_class_name = fields[1] + # Since we currently support only a single datatype_file, + # we have what we need. + break + try: + sys.path.insert( 0, relative_head ) + imported_module = __import__( datatype_module ) + sys.path.pop( 0 ) + except Exception, e: + log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) ) + else: + # The repository includes a datayptes_conf.xml file, but no code file that + # contains data type classes. This implies that the data types in datayptes_conf.xml + # are all subclasses of data types that are in the distribution. 
+ imported_module = None app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module ) def load_repository_contents( app, name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir, current_working_dir, tmp_name, tool_section=None, shed_tool_conf=None, new_install=True ): diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -191,13 +191,20 @@ repo_info_dict = kwd[ 'repo_info_dict' ] new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' ) tool_panel_section = kwd.get( 'tool_panel_section', '' ) - if kwd.get( 'select_tool_panel_section_button', False ): - shed_tool_conf = kwd[ 'shed_tool_conf' ] + includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) ) + if not includes_tools or ( includes_tools and kwd.get( 'select_tool_panel_section_button', False ) ): + if includes_tools: + shed_tool_conf = kwd[ 'shed_tool_conf' ] + else: + # If installing a repository that includes no tools, get the relative + # tool_path from the file to which the install_tool_config_file config + # setting points. + shed_tool_conf = trans.app.config.install_tool_config # Get the tool path. 
for k, tool_path in trans.app.toolbox.shed_tool_confs.items(): if k == shed_tool_conf: break - if new_tool_panel_section or tool_panel_section: + if includes_tools and ( new_tool_panel_section or tool_panel_section ): if new_tool_panel_section: section_id = new_tool_panel_section.lower().replace( ' ', '_' ) new_section_key = 'section_%s' % str( section_id ) @@ -290,6 +297,7 @@ tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, shed_tool_conf=shed_tool_conf, + includes_tools=includes_tools, shed_tool_conf_select_field=shed_tool_conf_select_field, tool_panel_section_select_field=tool_panel_section_select_field, new_tool_panel_section=new_tool_panel_section, diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -4,6 +4,7 @@ from galaxy.tools import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.hash_util import * +from galaxy.util.shed_util import generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata from galaxy.web.base.controller import * from galaxy.webapps.community import model from galaxy.model.orm import * @@ -149,6 +150,7 @@ .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ .first() def generate_clone_url( trans, repository_id ): + """Generate the URL for cloning a repository.""" repository = get_repository( trans, repository_id ) protocol, base = trans.request.base.split( '://' ) if trans.user: @@ -220,54 +222,6 @@ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." 
% str( index_tail ) invalid_files.append( ( name, correction_msg ) ) return can_set_metadata, invalid_files -def generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict ): - """ - Update the received metadata_dict with changes that have been - applied to the received tool. - """ - repository = get_repository( trans, id ) - # Handle tool.requirements. - tool_requirements = [] - for tr in tool.requirements: - name=tr.name - type=tr.type - if type == 'fabfile': - version = None - fabfile = tr.fabfile - method = tr.method - else: - version = tr.version - fabfile = None - method = None - requirement_dict = dict( name=name, - type=type, - version=version, - fabfile=fabfile, - method=method ) - tool_requirements.append( requirement_dict ) - # Handle tool.tests. - tool_tests = [] - if tool.tests: - for ttb in tool.tests: - test_dict = dict( name=ttb.name, - required_files=ttb.required_files, - inputs=ttb.inputs, - outputs=ttb.outputs ) - tool_tests.append( test_dict ) - tool_dict = dict( id=tool.id, - guid = generate_tool_guid( trans, repository, tool ), - name=tool.name, - version=tool.version, - description=tool.description, - version_string_cmd = tool.version_string_cmd, - tool_config=tool_config, - requirements=tool_requirements, - tests=tool_tests ) - if 'tools' in metadata_dict: - metadata_dict[ 'tools' ].append( tool_dict ) - else: - metadata_dict[ 'tools' ] = [ tool_dict ] - return metadata_dict def new_tool_metadata_required( trans, id, metadata_dict ): """ Compare the last saved metadata for each tool in the repository with the new metadata @@ -309,16 +263,6 @@ # The received metadata_dict includes no metadata for tools, so a new repository_metadata table # record is not needed. return False -def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received exported_workflow_dict. 
Store everything in the database. - """ - if 'workflows' in metadata_dict: - metadata_dict[ 'workflows' ].append( exported_workflow_dict ) - else: - metadata_dict[ 'workflows' ] = [ exported_workflow_dict ] - return metadata_dict def new_workflow_metadata_required( trans, id, metadata_dict ): """ Currently everything about an exported workflow except the name is hard-coded, so there's @@ -337,34 +281,6 @@ # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table # record is not needed. return False -def generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ): - """ - Update the received metadata_dict with changes that have been applied - to the received datatypes_config. - """ - # Parse datatypes_config. - tree = ElementTree.parse( datatypes_config ) - root = tree.getroot() - ElementInclude.include( root ) - repository_datatype_code_files = [] - datatype_files = root.find( 'datatype_files' ) - if datatype_files: - for elem in datatype_files.findall( 'datatype_file' ): - name = elem.get( 'name', None ) - repository_datatype_code_files.append( name ) - metadata_dict[ 'datatype_files' ] = repository_datatype_code_files - datatypes = [] - registration = root.find( 'registration' ) - if registration: - for elem in registration.findall( 'datatype' ): - extension = elem.get( 'extension', None ) - dtype = elem.get( 'type', None ) - mimetype = elem.get( 'mimetype', None ) - datatypes.append( dict( extension=extension, - dtype=dtype, - mimetype=mimetype ) ) - metadata_dict[ 'datatypes' ] = datatypes - return metadata_dict def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir ): # Browse the repository tip files on disk to generate metadata. 
This is faster than # the generate_metadata_for_changeset_revision() method below because fctx.data() does @@ -382,7 +298,7 @@ datatypes_config = os.path.abspath( os.path.join( root, name ) ) break if datatypes_config: - metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ) + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) # Find all special .sample files. for root, dirs, files in os.walk( repo_dir ): if root.find( '.hg' ) < 0: @@ -409,19 +325,19 @@ if can_set_metadata: # Update the list of metadata dictionaries for tools in metadata_dict. tool_config = os.path.join( root, name ) - metadata_dict = generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict ) + repository_clone_url = generate_clone_url( trans, id ) + metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) # Find all exported workflows elif name.endswith( '.ga' ): try: - full_path = os.path.abspath( os.path.join( root, name ) ) + relative_path = os.path.join( root, name ) # Convert workflow data from json - fp = open( full_path, 'rb' ) + fp = open( relative_path, 'rb' ) workflow_text = fp.read() fp.close() exported_workflow_dict = from_json_string( workflow_text ) if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - # Update the list of metadata dictionaries for workflows in metadata_dict. 
- metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ) + metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) except Exception, e: invalid_files.append( ( name, str( e ) ) ) return metadata_dict, invalid_files @@ -438,7 +354,7 @@ datatypes_config = fctx.data() break if datatypes_config: - metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ) + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) # Get all tool config file names from the hgweb url, something like: # /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml for filename in ctx: @@ -469,7 +385,8 @@ # anything, but may result in a bit of confusion when maintaining the code / data over time. # IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository # tip, we do not have to handle any .loc.sample files since they would have been handled previously. - metadata_dict = generate_tool_metadata( trans, id, changeset_revision, filename, tool, metadata_dict ) + repository_clone_url = generate_clone_url( trans, id ) + metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict ) try: os.unlink( tmp_filename ) except: @@ -481,8 +398,7 @@ workflow_text = fctx.data() exported_workflow_dict = from_json_string( workflow_text ) if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - # Update the list of metadata dictionaries for workflows in metadata_dict. 
- metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ) + metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict ) except Exception, e: invalid_files.append( ( name, str( e ) ) ) return metadata_dict, invalid_files @@ -510,12 +426,18 @@ if len( repository.downloadable_revisions ) == 1: handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) else: - # Update the last saved repository_metadata table row. repository_metadata = get_latest_repository_metadata( trans, id ) - repository_metadata.changeset_revision = changeset_revision - repository_metadata.metadata = metadata_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() + if repository_metadata: + # Update the last saved repository_metadata table row. + repository_metadata.changeset_revision = changeset_revision + repository_metadata.metadata = metadata_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + else: + # There are no tools in the repository, and we're setting metadat on the repository tip. + repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() else: # We're re-generating metadata for an old repository revision. 
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -436,9 +436,9 @@ **kwd ) ) if operation == "install": galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' ) - encoded_repo_info_dict = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) ) - url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s' % \ - ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict ) + encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) ) + url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \ + ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) ) return trans.response.send_redirect( url ) else: # This can only occur when there is a multi-select grid with check boxes and an operation, @@ -512,9 +512,9 @@ **kwd ) ) if operation == "install": galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' ) - encoded_repo_info_dict = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) ) - url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s' % \ - ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict ) + encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) ) + url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \ + ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) ) return trans.response.send_redirect( url ) else: 
# This can only occur when there is a multi-select grid with check boxes and an operation, @@ -707,14 +707,17 @@ return match_tuples def __encode_repo_info_dict( self, trans, webapp, repository_metadata_ids ): repo_info_dict = {} + includes_tools = False for repository_metadata_id in repository_metadata_ids: repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id ) + if not includes_tools and 'tools' in repository_metadata.metadata: + includes_tools = True repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) ) repository_id = trans.security.encode_id( repository.id ) changeset_revision = repository_metadata.changeset_revision repository_clone_url = generate_clone_url( trans, repository_id ) repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision ) - return encode( repo_info_dict ) + return encode( repo_info_dict ), includes_tools @web.expose def preview_tools_in_changeset( self, trans, repository_id, **kwd ): params = util.Params( kwd ) @@ -755,12 +758,16 @@ repository_clone_url = generate_clone_url( trans, repository_id ) repository = get_repository( trans, repository_id ) changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) ) + repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + # Tell the caller if the repository includes Galaxy tools so the page + # enabling selection of the tool panel section can be displayed. + includes_tools = 'tools' in repository_metadata.metadata repo_info_dict = {} repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision ) encoded_repo_info_dict = encode( repo_info_dict ) # Redirect back to local Galaxy to perform install. 
- url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s' % \ - ( galaxy_url, url_for( '', qualified=True ), encoded_repo_info_dict ) + url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \ + ( galaxy_url, url_for( '', qualified=True ), encoded_repo_info_dict, str( includes_tools ) ) return trans.response.send_redirect( url ) @web.expose def check_for_updates( self, trans, **kwd ): diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/admin/tool_shed_repository/common.mako --- a/templates/admin/tool_shed_repository/common.mako +++ b/templates/admin/tool_shed_repository/common.mako @@ -128,25 +128,22 @@ <table class="grid"><tr><td><b>extension</b></td> - <td><b>dtype</b></td> + <td><b>type</b></td><td><b>mimetype</b></td> + <td><b>subclass</b></td></tr> %for datatypes_dict in datatypes_dicts: - <% - extension = datatypes_dict[ 'extension' ] - dtype = datatypes_dict[ 'dtype' ] - mimetype = datatypes_dict[ 'mimetype' ] + <% + extension = datatypes_dict.get( 'extension', ' ' ) + dtype = datatypes_dict.get( 'dtype', ' ' ) + mimetype = datatypes_dict.get( 'mimetype', ' ' ) + subclass = datatypes_dict.get( 'subclass', ' ' ) %><tr><td>${extension}</td><td>${dtype}</td> - <td> - %if mimetype: - ${mimetype} - %else: - - %endif - </td> + <td>${mimetype}</td> + <td>${subclass}</td></tr> %endfor </table> diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/admin/tool_shed_repository/select_tool_panel_section.mako --- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako +++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako @@ -25,7 +25,7 @@ <div class="toolForm"><div class="toolFormTitle">Choose tool panel section to contain installed tools (optional)</div><div class="toolFormBody"> - <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( 
controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" > + <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" > %if shed_tool_conf_select_field: <div class="form-row"><label>Shed tool configuration file:</label> diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/webapps/community/repository/common.mako --- a/templates/webapps/community/repository/common.mako +++ b/templates/webapps/community/repository/common.mako @@ -209,25 +209,22 @@ <table class="grid"><tr><td><b>extension</b></td> - <td><b>dtype</b></td> + <td><b>type</b></td><td><b>mimetype</b></td> + <td><b>subclass</b></td></tr> %for datatypes_dict in datatypes_dicts: - <% - extension = datatypes_dict[ 'extension' ] - dtype = datatypes_dict[ 'dtype' ] - mimetype = datatypes_dict[ 'mimetype' ] + <% + extension = datatypes_dict.get( 'extension', ' ' ) + dtype = datatypes_dict.get( 'dtype', ' ' ) + mimetype = datatypes_dict.get( 'mimetype', ' ' ) + subclass = datatypes_dict.get( 'subclass', ' ' ) %><tr><td>${extension}</td><td>${dtype}</td> - <td> - %if mimetype: - ${mimetype} - %else: - - %endif - </td> + <td>${mimetype}</td> + <td>${subclass}</td></tr> %endfor </table> Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
-
Bitbucket