commit/galaxy-central: greg: Add the ability to import a repository capsule that was exported from a tool shed into another tool shed
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/5c59f2c4f770/

Changeset: 5c59f2c4f770
User: greg
Date: 2013-11-08 20:54:21
Summary: Add the ability to import a repository capsule that was exported from a tool shed into another tool shed.

Upon initial upload, the capsule is inspected and validated to ensure it is a valid exported capsule. Also, since exporting a capsule allows repository dependencies to be included, the Tool Shed into which the capsule is being imported is inspected to see whether any of the repositories defined by the capsule's exported repository archives already exist in that Tool Shed. A repository that already exists will not be overwritten or altered in any way; the existing repository will simply be used. Since the repositories exported into the capsule are each associated with a user (the owner), the user importing the capsule must be authorized to create a repository in the current Tool Shed with that same owner. If the current user is an admin user or a member of the IUC, all repositories will be created regardless of owner. Otherwise, only repositories whose associated owner is the current user will be created. An error importing one repository will not typically halt the import of the remaining repository archives in the capsule. However, errors encountered while importing a repository that is a dependency of a later repository may require manual fixes in some cases. For example, if a capsule includes two repositories that are defined as circular dependencies, the repository_dependencies.xml file of one of them will need to be manually edited to add the appropriate changeset revision, since that revision was not yet defined at the time the repository was imported. The Tool Shed API will be enhanced to accommodate this new feature shortly.
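The authorization rule described above can be summarized with a small, illustrative sketch. This is not the committed Galaxy code (the real logic lives in user_can_import_repository_archive in lib/galaxy/webapps/tool_shed/security/__init__.py and get_repository_status_from_tool_shed in lib/tool_shed/util/import_util.py in the diff below); the function and parameter names here are hypothetical.

    # Hypothetical sketch of the import-authorization rule: admins and IUC members
    # may import any archive; everyone else may only import archives they own.
    def can_import_archive(importer, archive_owner, is_admin, iuc_members):
        """Return True if `importer` may create a repository owned by `archive_owner`."""
        if is_admin:
            # Admin users may import archives regardless of the owner recorded in the capsule.
            return True
        if importer in iuc_members:
            # Members of the Intergalactic Utilities Commission may import archives
            # owned by other users.
            return True
        # Otherwise, only archives whose recorded owner is the importer are allowed.
        return importer == archive_owner

    if __name__ == '__main__':
        iuc = {'devteam'}
        assert can_import_archive('greg', 'greg', False, iuc)         # same owner -> allowed
        assert can_import_archive('devteam', 'test', False, iuc)      # IUC member -> allowed
        assert not can_import_archive('someone', 'test', False, iuc)  # otherwise -> not authorized

Archives that fail this check are given the status "Not authorized to import" and are skipped rather than aborting the whole capsule import.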
Affected #: 13 files diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -11,8 +11,6 @@ from galaxy.util.odict import odict from galaxy.web.base.controller import BaseUIController from galaxy.web.form_builder import CheckboxField -from galaxy.webapps.tool_shed import model -from galaxy.webapps.tool_shed.model import directory_hash_id from galaxy.web.framework.helpers import grids from galaxy.util import json from galaxy.model.orm import and_ @@ -21,6 +19,7 @@ from tool_shed.util import container_util from tool_shed.util import encoding_util from tool_shed.util import export_util +from tool_shed.util import import_util from tool_shed.util import metadata_util from tool_shed.util import readme_util from tool_shed.util import repository_dependency_util @@ -942,43 +941,13 @@ if error: status = 'error' else: - # Add the repository record to the db - repository = trans.app.model.Repository( name=name, - type=repository_type, - description=description, - long_description=long_description, - user_id=trans.user.id ) - # Flush to get the id - trans.sa_session.add( repository ) - trans.sa_session.flush() - # Determine the repository's repo_path on disk - dir = os.path.join( trans.app.config.file_path, *directory_hash_id( repository.id ) ) - # Create directory if it does not exist - if not os.path.exists( dir ): - os.makedirs( dir ) - # Define repo name inside hashed directory - repository_path = os.path.join( dir, "repo_%d" % repository.id ) - # Create local repository directory - if not os.path.exists( repository_path ): - os.makedirs( repository_path ) - # Create the local repository - repo = hg.repository( suc.get_configured_ui(), repository_path, create=True ) - # Add an entry in the hgweb.config file for the local repository. - lhs = "repos/%s/%s" % ( repository.user.username, repository.name ) - trans.app.hgweb_config_manager.add_entry( lhs, repository_path ) - # Create a .hg/hgrc file for the local repository - repository_maintenance_util.create_hgrc_file( trans, repository ) - flush_needed = False - if category_ids: - # Create category associations - for category_id in category_ids: - category = trans.sa_session.query( model.Category ).get( trans.security.decode_id( category_id ) ) - rca = trans.app.model.RepositoryCategoryAssociation( repository, category ) - trans.sa_session.add( rca ) - flush_needed = True - if flush_needed: - trans.sa_session.flush() - message = "Repository <b>%s</b> has been created." 
% str( repository.name ) + repository, message = repository_maintenance_util.create_repository( trans, + name, + repository_type, + description, + long_description, + user_id=trans.user.id, + category_ids=category_ids ) trans.response.send_redirect( web.url_for( controller='repository', action='manage_repository', message=message, @@ -1882,14 +1851,49 @@ @web.expose def import_capsule( self, trans, **kwd ): - pass + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) + capsule_file_name = kwd.get( 'capsule_file_name', None ) + encoded_file_path = kwd.get( 'encoded_file_path', None ) + file_path = encoding_util.tool_shed_decode( encoded_file_path ) + export_info_file_path = os.path.join( file_path, 'export_info.xml' ) + export_info_dict = import_util.get_export_info_dict( export_info_file_path ) + manifest_file_path = os.path.join( file_path, 'manifest.xml' ) + # The manifest.xml file has already been validated, so no error_message should be returned here. + repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path ) + # Determine the status for each exported repository archive contained within the capsule. + repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts ) + if 'import_capsule_button' in kwd: + # Generate a list of repository name / import results message tuples for display after the capsule is imported. + import_results_tups = [] + # Only create repositories that do not yet exist and that the current user is authorized to create. The + # status will be None for repositories that fall into the intersection of these 2 categories. + for repository_status_info_dict in repository_status_info_dicts: + # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict. + repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name + repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path + import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans, + repository_status_info_dict, + import_results_tups ) + suc.remove_dir( file_path ) + return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako', + export_info_dict=export_info_dict, + import_results_tups=import_results_tups, + message=message, + status=status ) + return trans.fill_template( '/webapps/tool_shed/repository/import_capsule.mako', + encoded_file_path=encoded_file_path, + export_info_dict=export_info_dict, + repository_status_info_dicts=repository_status_info_dicts, + message=message, + status=status ) @web.expose def index( self, trans, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) # See if there are any RepositoryMetadata records since menu items require them. - repository_metadata = trans.sa_session.query( model.RepositoryMetadata ).first() + repository_metadata = trans.sa_session.query( trans.model.RepositoryMetadata ).first() current_user = trans.user has_reviewed_repositories = False has_deprecated_repositories = False @@ -2446,8 +2450,8 @@ @web.expose def reset_all_metadata( self, trans, id, **kwd ): + """Reset all metadata on the complete changelog for a single repository in the tool shed.""" # This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template. - # It resets all metadata on the complete changelog for a single repository in the tool shed. 
invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd ) if invalid_file_tups: repository = suc.get_repository_in_tool_shed( trans, id ) @@ -2808,6 +2812,31 @@ return '' @web.expose + def upload_capsule( self, trans, **kwd ): + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) + url = kwd.get( 'url', '' ) + if 'upload_capsule_button' in kwd: + capsule_dict = import_util.upload_capsule( trans, **kwd ) + status = capsule_dict.get( 'status', 'error' ) + if status == 'error': + message = capsule_dict.get( 'error_message', '' ) + else: + capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict ) + capsule_dict = import_util.validate_capsule( trans, **capsule_dict ) + status = capsule_dict.get( 'status', 'error' ) + if status == 'ok': + return trans.response.send_redirect( web.url_for( controller='repository', + action='import_capsule', + **capsule_dict ) ) + else: + message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) ) + return trans.fill_template( '/webapps/tool_shed/repository/upload_capsule.mako', + url=url, + message=message, + status=status ) + + @web.expose def view_changelog( self, trans, id, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -123,7 +123,7 @@ self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ) else: if ( isgzip or isbz2 ) and uncompress_file: - uploaded_file_filename = commit_util.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ) + uploaded_file_filename = commit_util.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip=isgzip, isbz2=isbz2 ) if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and uploaded_file_filename != suc.TOOL_DEPENDENCY_DEFINITION_FILENAME: ok = False message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named <b>tool_dependencies.xml</b>.' 
diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/galaxy/webapps/tool_shed/security/__init__.py --- a/lib/galaxy/webapps/tool_shed/security/__init__.py +++ b/lib/galaxy/webapps/tool_shed/security/__init__.py @@ -1,58 +1,74 @@ -""" -Galaxy Tool Shed Security -""" -import os, logging, ConfigParser -from datetime import datetime, timedelta +"""Tool Shed Security""" +import ConfigParser +import logging +import os +from datetime import datetime +from datetime import timedelta from galaxy.util.bunch import Bunch from galaxy.util import listify from galaxy.model.orm import and_ log = logging.getLogger(__name__) + class Action( object ): + def __init__( self, action, description, model ): self.action = action self.description = description self.model = model + class RBACAgent: """Handle Galaxy Tool Shed security""" permitted_actions = Bunch() + def associate_components( self, **kwd ): raise 'No valid method of associating provided components: %s' % kwd + def associate_user_role( self, user, role ): raise 'No valid method of associating a user with a role' + def convert_permitted_action_strings( self, permitted_action_strings ): """ When getting permitted actions from an untrusted source like a form, ensure that they match our actual permitted actions. """ return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] ) + def create_private_user_role( self, user ): raise "Unimplemented Method" + def get_action( self, name, default=None ): """Get a permitted action by its dict key or action name""" for k, v in self.permitted_actions.items(): if k == name or v.action == name: return v return default + def get_actions( self ): """Get all permitted actions as a list of Action objects""" return self.permitted_actions.__dict__.values() + def get_item_actions( self, action, item ): raise 'No valid method of retrieving action (%s) for item %s.' 
% ( action, item ) + def get_private_user_role( self, user ): raise "Unimplemented Method" + class CommunityRBACAgent( RBACAgent ): + def __init__( self, model, permitted_actions=None ): self.model = model if permitted_actions: self.permitted_actions = permitted_actions + @property def sa_session( self ): """Returns a SQLAlchemy session""" return self.model.context + def allow_action( self, roles, action, item ): """ Method for checking a permission for the current user ( based on roles ) to perform a @@ -67,6 +83,7 @@ ret_val = True break return ret_val + def associate_components( self, **kwd ): if 'user' in kwd: if 'group' in kwd: @@ -79,26 +96,31 @@ elif 'repository' in kwd: return self.associate_repository_category( kwd[ 'repository' ], kwd[ 'category' ] ) raise 'No valid method of associating provided components: %s' % kwd + def associate_group_role( self, group, role ): assoc = self.model.GroupRoleAssociation( group, role ) self.sa_session.add( assoc ) self.sa_session.flush() return assoc + def associate_user_group( self, user, group ): assoc = self.model.UserGroupAssociation( user, group ) self.sa_session.add( assoc ) self.sa_session.flush() return assoc + def associate_user_role( self, user, role ): assoc = self.model.UserRoleAssociation( user, role ) self.sa_session.add( assoc ) self.sa_session.flush() return assoc + def associate_repository_category( self, repository, category ): assoc = self.model.RepositoryCategoryAssociation( repository, category ) self.sa_session.add( assoc ) self.sa_session.flush() return assoc + def create_private_user_role( self, user ): # Create private role role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE ) @@ -107,9 +129,11 @@ # Add user to role self.associate_components( role=role, user=user ) return role + def get_item_actions( self, action, item ): # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation return [ permission for permission in item.actions if permission.action == action.action ] + def get_private_user_role( self, user, auto_create=False ): role = self.sa_session.query( self.model.Role ) \ .filter( and_( self.model.Role.table.c.name == user.email, @@ -121,11 +145,13 @@ else: return None return role + def get_repository_reviewer_role( self ): return self.sa_session.query( self.model.Role ) \ .filter( and_( self.model.Role.table.c.name == 'Repository Reviewer', self.model.Role.table.c.type == self.model.Role.types.SYSTEM ) ) \ .first() + def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ): for group in groups: if delete_existing_assocs: @@ -136,6 +162,7 @@ self.associate_components( group=group, role=role ) for user in users: self.associate_components( group=group, user=user ) + def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ): for role in roles: if delete_existing_assocs: @@ -146,6 +173,7 @@ self.associate_components( user=user, role=role ) for group in groups: self.associate_components( group=group, role=role ) + def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ): for user in users: if delete_existing_assocs: @@ -159,10 +187,27 @@ self.associate_components( user=user, role=role ) for group in groups: self.associate_components( user=user, group=group ) + def can_push( self, app, user, repository ): if user: return user.username in listify( repository.allow_push( app ) ) 
return False + + def user_can_import_repository_archive( self, user, archive_owner ): + # This method should be called only if the current user is not an admin. + if user.username == archive_owner: + return True + # A member of the IUC is authorized to create new repositories that are owned by another user. + iuc_group = self.sa_session.query( self.model.Group ) \ + .filter( and_( self.model.Group.table.c.name == 'Intergalactic Utilities Commission', + self.model.Group.table.c.deleted == False ) ) \ + .first() + if iuc_group is not None: + for uga in iuc_group.users: + if uga.user.id == user.id: + return True + return False + def user_can_review_repositories( self, user ): if user: roles = user.all_roles() @@ -171,6 +216,7 @@ if repository_reviewer_role: return repository_reviewer_role in roles return False + def user_can_browse_component_review( self, app, repository, component_review, user ): if component_review and user: if self.can_push( app, user, repository ): @@ -181,6 +227,7 @@ # Reviewers can access private/public component reviews. return True return False + def get_permitted_actions( filter=None ): '''Utility method to return a subset of RBACAgent's permitted actions''' if filter is None: diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -323,13 +323,13 @@ # <action type="set_environment_for_install"> # <action type="setup_r_environment"> # <action type="setup_ruby_environment"> + error_message = '' for repo_index, repo_elem in enumerate( action_elem ): # Make sure to skip comments and tags that are not <repository>. if repo_elem.tag == 'repository': - revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) - if error_message: - exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message - raise Exception( exception_message ) + revised, repository_elem, message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) + if message: + error_message += 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % message if revised: action_elem[ repo_index ] = repository_elem package_altered = True @@ -337,7 +337,7 @@ altered = True if package_altered: actions_elem[ action_index ] = action_elem - return package_altered, altered, actions_elem + return package_altered, altered, actions_elem, error_message def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ): """ @@ -406,7 +406,7 @@ else: # Inspect the sub elements of last_actions_elem to locate all <repository> tags and # populate them with toolshed and changeset_revision attributes if necessary. - last_actions_package_altered, altered, last_actions_elem = \ + last_actions_package_altered, altered, last_actions_elem, message = \ handle_repository_dependency_sub_elem( trans, last_actions_package_altered, altered, @@ -414,22 +414,26 @@ last_actions_index, last_actions_elem, unpopulate=unpopulate ) + if message: + error_message += message elif actions_elem.tag == 'actions': # We are not in an <actions_group> tag set, so we must be in an <actions> tag set. for action_index, action_elem in enumerate( actions_elem ): # Inspect the sub elements of last_actions_elem to locate all <repository> tags and populate them with # toolshed and changeset_revision attributes if necessary. 
- package_altered, altered, actions_elem = handle_repository_dependency_sub_elem( trans, - package_altered, - altered, - actions_elem, - action_index, - action_elem, - unpopulate=unpopulate ) + package_altered, altered, actions_elem, message = handle_repository_dependency_sub_elem( trans, + package_altered, + altered, + actions_elem, + action_index, + action_elem, + unpopulate=unpopulate ) + if message: + error_message += message else: package_name = root_elem.get( 'name', '' ) package_version = root_elem.get( 'version', '' ) - error_message = 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) ) + error_message += 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) ) error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> ' error_message += 'tag set.' if package_altered: @@ -474,7 +478,7 @@ return False, error_msg return True, '' -def uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ): +def uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip=False, isbz2=False ): if isgzip: handle_gzip( repository, uploaded_file_name ) return uploaded_file_filename.rstrip( '.gz' ) diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -89,7 +89,12 @@ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-export-er" ) ordered_repository = ordered_repositories[ index ] ordered_changeset_revision = ordered_changeset_revisions[ index ] - repository_archive, error_message = generate_repository_archive( trans, work_dir, tool_shed_url, ordered_repository, ordered_changeset_revision, file_type ) + repository_archive, error_message = generate_repository_archive( trans, + work_dir, + tool_shed_url, + ordered_repository, + ordered_changeset_revision, + file_type ) if error_message: error_messages = '%s %s' % ( error_messages, error_message ) else: @@ -105,7 +110,10 @@ tmp_export_info = xml_util.create_and_write_tmp_file( export_elem, use_indent=True ) repositories_archive.add( tmp_export_info, arcname='export_info.xml' ) # Write the manifest, which must preserve the order in which the repositories should be imported. 
- tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_registry.exported_repository_elems, use_indent=True ) + exported_repository_root = xml_util.create_element( 'repositories' ) + for exported_repository_elem in exported_repository_registry.exported_repository_elems: + exported_repository_root.append( exported_repository_elem ) + tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_root, use_indent=True ) repositories_archive.add( tmp_manifest, arcname='manifest.xml' ) except Exception, e: log.exception( str( e ) ) diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/tool_shed/util/import_util.py --- /dev/null +++ b/lib/tool_shed/util/import_util.py @@ -0,0 +1,369 @@ +import logging +import os +import shutil +import tarfile +import tempfile +import urllib +from galaxy import util +from galaxy.datatypes import checkers +from tool_shed.util import commit_util +from tool_shed.util import encoding_util +from tool_shed.util import metadata_util +from tool_shed.util import xml_util +import tool_shed.util.shed_util_common as suc + +from galaxy import eggs +eggs.require( 'mercurial' ) + +from mercurial import commands +from mercurial import hg +from mercurial import ui + +log = logging.getLogger( __name__ ) + +def extract_capsule_files( trans, **kwd ): + """Extract the uploaded capsule archive into a temporary location for inspection, validation and potential import.""" + return_dict = {} + tar_archive = kwd.get( 'tar_archive', None ) + uploaded_file = kwd.get( 'uploaded_file', None ) + capsule_file_name = kwd.get( 'capsule_file_name', None ) + if tar_archive is not None and uploaded_file is not None and capsule_file_name is not None: + return_dict.update( kwd ) + extract_directory_path = tempfile.mkdtemp( prefix="tmp-capsule-ecf" ) + if capsule_file_name.endswith( '.tar.gz' ): + extract_directory_name = capsule_file_name.replace( '.tar.gz', '' ) + elif capsule_file_name.endswith( '.tar' ): + extract_directory_name = capsule_file_name.replace( '.tar', '' ) + else: + extract_directory_name = capsule_file_name + file_path = os.path.join( extract_directory_path, extract_directory_name ) + return_dict[ 'encoded_file_path' ] = encoding_util.tool_shed_encode( file_path ) + tar_archive.extractall( path=file_path ) + tar_archive.close() + uploaded_file.close() + del return_dict[ 'tar_archive' ] + del return_dict[ 'uploaded_file' ] + return return_dict + +def get_archives_from_manifest( manifest_file_path ): + """ + Return the list of archive names defined in the capsule manifest. This method sill validate the manifest by ensuring all + <repository> tag sets contain a valid <archive> sub-element. + """ + archives = [] + error_message = '' + manifest_tree, error_message = xml_util.parse_xml( manifest_file_path ) + if error_message: + return archives, error_message + manifest_root = manifest_tree.getroot() + for elem in manifest_root: + # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test"> + if elem.tag != 'repository': + error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets. ' + error_message += 'The tag <b><%s></b> is invalid.' 
% str( elem.tag ) + return [], error_message + archive_file_name = None + for repository_elem in elem: + if repository_elem.tag == 'archive': + # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive> + archive_file_name = repository_elem.text + break + if archive_file_name is None: + error_message = 'The %s tag set is missing a required <archive> sub-element.' % str( elem.tag ) + return [], error_message + archives.append( archive_file_name ) + return archives, error_message + +def get_export_info_dict( export_info_file_path ): + """Parse the export_info.xml file contained within the capsule and return a dictionary containing it's entries.""" + export_info_tree, error_message = xml_util.parse_xml( export_info_file_path ) + export_info_root = export_info_tree.getroot() + export_info_dict = {} + for elem in export_info_root: + if elem.tag == 'export_time': + export_info_dict[ 'export_time' ] = elem.text + elif elem.tag == 'tool_shed': + export_info_dict[ 'tool_shed' ] = elem.text + elif elem.tag == 'repository_name': + export_info_dict[ 'repository_name' ] = elem.text + elif elem.tag == 'repository_owner': + export_info_dict[ 'repository_owner' ] = elem.text + elif elem.tag == 'changeset_revision': + export_info_dict[ 'changeset_revision' ] = elem.text + elif elem.tag == 'export_repository_dependencies': + if util.asbool( elem.text ): + export_info_dict[ 'export_repository_dependencies' ] = 'Yes' + else: + export_info_dict[ 'export_repository_dependencies' ] = 'No' + return export_info_dict + +def get_repository_info_from_manifest( manifest_file_path ): + """ + Parse the capsule manifest and return a list of dictionaries containing information about each exported repository + archive contained within the capsule. + """ + repository_info_dicts = [] + manifest_tree, error_message = xml_util.parse_xml( manifest_file_path ) + if error_message: + return repository_info_dicts, error_message + manifest_root = manifest_tree.getroot() + for elem in manifest_root: + # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test"> + if elem.tag != 'repository': + error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets. ' + error_message += 'The tag <b><%s></b> is invalid.' 
% str( elem.tag ) + return [], error_message + name = elem.get( 'name', None ) + owner = elem.get( 'username', None ) + type = elem.get( 'type', None ) + if name is None or owner is None or type is None: + error_message = 'Missing required name, type, owner attributes from the tag %s' % str( elem.tag ) + return [], error_message + repository_info_dict = dict( name=name, owner=owner, type=type ) + for repository_elem in elem: + if repository_elem.tag == 'archive': + # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive> + archive_file_name = repository_elem.text + repository_info_dict[ 'archive_file_name' ] = archive_file_name + items = archive_file_name.split( '-' ) + changeset_revision = items[ 1 ].rstrip( '.tar.gz' ) + repository_info_dict [ 'changeset_revision' ] = changeset_revision + elif repository_elem.tag == 'categories': + category_names = [] + for category_elem in repository_elem: + if category_elem.tag == 'category': + category_names.append( category_elem.text ) + repository_info_dict[ 'category_names' ] = category_names + elif repository_elem.tag == 'description': + repository_info_dict[ 'description' ] = repository_elem.text + elif repository_elem.tag == 'long_description': + repository_info_dict[ 'long_description' ] = repository_elem.text + repository_info_dicts.append( repository_info_dict ) + return repository_info_dicts, error_message + +def get_repository_status_from_tool_shed( trans, repository_info_dicts ): + """ + For each exported repository archive contained in the capsule, inspect the Tool Shed to see if that repository already + exists or if the current user is authorized to create the repository, and set a status appropriately. If repository + dependencies are included in the capsule, repositories may have various owners. We will keep repositories associated + with owners, so we need to restrict created repositories to those the current user can create. If the current user is + an admin or a member of the IUC, all repositories will be created no matter the owner. Otherwise, only repositories + whose associated owner is the current user will be created. + """ + repository_status_info_dicts = [] + for repository_info_dict in repository_info_dicts: + repository = suc.get_repository_by_name_and_owner( trans.app, repository_info_dict[ 'name' ], repository_info_dict[ 'owner' ] ) + if repository: + if repository.deleted: + repository_info_dict[ 'status' ] = 'Exists, deleted' + elif repository.deprecated: + repository_info_dict[ 'status' ] = 'Exists, deprecated' + else: + repository_info_dict[ 'status' ] = 'Exists' + else: + # No repository with the specified name and owner currently exists, so make sure the current user can create one. 
+ if trans.user_is_admin(): + repository_info_dict[ 'status' ] = None + elif trans.app.security_agent.user_can_import_repository_archive( trans.user, owner ): + repository_info_dict[ 'status' ] = None + else: + repository_info_dict[ 'status' ] = 'Not authorized to import' + repository_status_info_dicts.append( repository_info_dict ) + return repository_status_info_dicts + +def import_repository_archive( trans, repository, repository_archive_dict ): + """Import a repository archive contained within a repository capsule.""" + archive_file_name = repository_archive_dict.get( 'archive_file_name', None ) + capsule_file_name = repository_archive_dict[ 'capsule_file_name' ] + encoded_file_path = repository_archive_dict[ 'encoded_file_path' ] + file_path = encoding_util.tool_shed_decode( encoded_file_path ) + results_dict = dict( ok=True, error_message='' ) + archive_file_path = os.path.join( file_path, archive_file_name ) + archive = tarfile.open( archive_file_path, 'r:*' ) + repo_dir = repository.repo_path( trans.app ) + repo = hg.repository( suc.get_configured_ui(), repo_dir ) + undesirable_dirs_removed = 0 + undesirable_files_removed = 0 + ok, error_message = commit_util.check_archive( repository, archive ) + if ok: + full_path = os.path.abspath( repo_dir ) + filenames_in_archive = [] + for tarinfo_obj in archive.getmembers(): + # Check files and directories in the archive. + ok = os.path.basename( tarinfo_obj.name ) not in commit_util.UNDESIRABLE_FILES + if ok: + for file_path_item in tarinfo_obj.name.split( '/' ): + if file_path_item in commit_util.UNDESIRABLE_DIRS: + undesirable_dirs_removed += 1 + error_message = 'Import failed: invalid file path <b>%s</b> in archive <b>%s</b>' % \ + ( str( file_path_item ), str( archive_file_name ) ) + results_dict[ 'ok' ] = False + results_dict[ 'error_message' ] += error_message + return results_dict + filenames_in_archive.append( tarinfo_obj.name ) + else: + undesirable_files_removed += 1 + # Extract the uploaded archive to the repository root. + archive.extractall( path=full_path ) + archive.close() + for filename in filenames_in_archive: + uploaded_file_name = os.path.join( full_path, filename ) + if os.path.split( uploaded_file_name )[ -1 ] == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: + # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. + altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans, + uploaded_file_name, + unpopulate=False ) + if error_message: + results_dict[ 'ok' ] = False + results_dict[ 'error_message' ] += error_message + if altered: + tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) + shutil.move( tmp_filename, uploaded_file_name ) + elif os.path.split( uploaded_file_name )[ -1 ] == suc.TOOL_DEPENDENCY_DEFINITION_FILENAME: + # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. + altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name ) + if error_message: + results_dict[ 'ok' ] = False + results_dict[ 'error_message' ] += error_message + if altered: + tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) + shutil.move( tmp_filename, uploaded_file_name ) + commit_message = 'Imported from capsule %s' % str( capsule_file_name ) + # Send email notification to those that have registered to receive alerts for new repositories in this Tool Shed. 
+ new_repo_alert = True + # Since the repository is new, the following must be False. + remove_repo_files_not_in_tar = False + ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \ + commit_util.handle_directory_changes( trans, + repository, + full_path, + filenames_in_archive, + remove_repo_files_not_in_tar, + new_repo_alert, + commit_message, + undesirable_dirs_removed, + undesirable_files_removed ) + try: + metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str ) + except Exception, e: + log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \ + ( str( repository.name ), str( archive_file_name ), str( e ) ) ) + results_dict[ 'ok' ] = ok + results_dict[ 'error_message' ] += error_message + else: + archive.close() + results_dict[ 'ok' ] = False + results_dict[ 'error_message' ] += error_message + return results_dict + +def upload_capsule( trans, **kwd ): + """Upload and prepare an exported repository capsule for validation.""" + file_data = kwd.get( 'file_data', '' ) + url = kwd.get( 'url', '' ) + uploaded_file = None + return_dict = dict( error_message='', + encoded_file_path=None, + status='ok', + tar_archive=None, + uploaded_file=None, + capsule_file_name=None ) + if file_data == '' and url == '': + message = 'No files were entered on the import form.' + status = 'error' + elif url: + valid_url = True + try: + stream = urllib.urlopen( url ) + except Exception, e: + valid_url = False + message = 'Error importing file via http: %s' % str( e ) + status = 'error' + if valid_url: + fd, uploaded_file_name = tempfile.mkstemp() + uploaded_file = open( uploaded_file_name, 'wb' ) + while 1: + chunk = stream.read( util.CHUNK_SIZE ) + if not chunk: + break + uploaded_file.write( chunk ) + uploaded_file.flush() + uploaded_file_filename = url.split( '/' )[ -1 ] + isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0 + elif file_data not in ( '', None ): + uploaded_file = file_data.file + uploaded_file_name = uploaded_file.name + uploaded_file_filename = os.path.split( file_data.filename )[ -1 ] + isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0 + if uploaded_file is not None: + if isempty: + uploaded_file.close() + return_dict[ 'error_message' ] = 'Your uploaded file is empty.' + return_dict[ 'status' ] = 'error' + return return_dict + try: + # Open for reading with transparent compression. + tar_archive = tarfile.open( uploaded_file_name, 'r:*' ) + except tarfile.ReadError, e: + error_message = 'Error opening file %s: %s' % ( str( uploaded_file_name ), str( e ) ) + log.exception( error_message ) + return_dict[ 'error_message' ] = error_message + return_dict[ 'status' ] = 'error' + uploaded_file.close() + tar_archive.close() + return return_dict + return_dict[ 'tar_archive' ] = tar_archive + return_dict[ 'uploaded_file' ] = uploaded_file + return_dict[ 'capsule_file_name' ] = uploaded_file_filename + else: + return_dict[ 'error_message' ] = 'No files were entered on the import form.' 
+ return_dict[ 'status' ] = 'error' + return return_dict + return return_dict + +def validate_capsule( trans, **kwd ): + """Inspect the uploaded capsule's manifest and it's contained files to ensure it is a valid repository capsule.""" + capsule_dict = {} + capsule_dict.update( kwd ) + encoded_file_path = capsule_dict.get( 'encoded_file_path', '' ) + file_path = encoding_util.tool_shed_decode( encoded_file_path ) + # The capsule must contain a valid XML file named export_info.xml. + export_info_file_path = os.path.join( file_path, 'export_info.xml' ) + export_info_tree, error_message = xml_util.parse_xml( export_info_file_path ) + if error_message: + capsule_dict[ 'error_message' ] = error_message + capsule_dict[ 'status' ] = 'error' + return capsule_dict + # The capsule must contain a valid XML file named manifest.xml. + manifest_file_path = os.path.join( file_path, 'manifest.xml' ) + # Validate the capsule manifest by inspecting name, owner, changeset_revision and type information contained within + # each <repository> tag set. + repository_info_dicts, error_message = get_repository_info_from_manifest( manifest_file_path ) + if error_message: + capsule_dict[ 'error_message' ] = error_message + capsule_dict[ 'status' ] = 'error' + return capsule_dict + # Validate the capsule manifest by ensuring all <repository> tag sets contain a valid <archive> sub-element. + archives, error_message = get_archives_from_manifest( manifest_file_path ) + if error_message: + capsule_dict[ 'error_message' ] = error_message + capsule_dict[ 'status' ] = 'error' + return capsule_dict + # Validate the capsule manifest by ensuring each defined archive file name exists within the capsule. + error_message = verify_archives_in_capsule( file_path, archives ) + if error_message: + capsule_dict[ 'error_message' ] = error_message + capsule_dict[ 'status' ] = 'error' + return capsule_dict + capsule_dict[ 'status' ] = 'ok' + return capsule_dict + +def verify_archives_in_capsule( file_path, archives ): + """Inspect the files contained within the capsule and make sure each is defined correctly in the capsule manifest.""" + error_message = '' + for archive_file_name in archives: + full_path = os.path.join( file_path, archive_file_name ) + if not os.path.exists( full_path ): + error_message = 'The uploaded capsule is invalid because the contained manifest.xml file defines an archive file ' + error_message += 'named <b>%s</b> which is not contained within the capsule.' % str( archive_file_name ) + break + return error_message diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b lib/tool_shed/util/repository_maintenance_util.py --- a/lib/tool_shed/util/repository_maintenance_util.py +++ b/lib/tool_shed/util/repository_maintenance_util.py @@ -1,8 +1,11 @@ import ConfigParser import logging +import os import re import tool_shed.util.shed_util_common as suc +from tool_shed.util import import_util from galaxy.web.form_builder import build_select_field +from galaxy.webapps.tool_shed.model import directory_hash_id from galaxy import eggs eggs.require( 'mercurial' ) @@ -56,6 +59,104 @@ fp.write( 'hgext.purge=' ) fp.close() +def create_repository( trans, name, type, description, long_description, user_id, category_ids=[] ): + # Add the repository record to the database. + repository = trans.app.model.Repository( name=name, + type=type, + description=description, + long_description=long_description, + user_id=user_id ) + # Flush to get the id. 
+ trans.sa_session.add( repository ) + trans.sa_session.flush() + # Determine the repository's repo_path on disk. + dir = os.path.join( trans.app.config.file_path, *directory_hash_id( repository.id ) ) + # Create directory if it does not exist. + if not os.path.exists( dir ): + os.makedirs( dir ) + # Define repo name inside hashed directory. + repository_path = os.path.join( dir, "repo_%d" % repository.id ) + # Create local repository directory. + if not os.path.exists( repository_path ): + os.makedirs( repository_path ) + # Create the local repository. + repo = hg.repository( suc.get_configured_ui(), repository_path, create=True ) + # Add an entry in the hgweb.config file for the local repository. + lhs = "repos/%s/%s" % ( repository.user.username, repository.name ) + trans.app.hgweb_config_manager.add_entry( lhs, repository_path ) + # Create a .hg/hgrc file for the local repository. + create_hgrc_file( trans, repository ) + flush_needed = False + if category_ids: + # Create category associations + for category_id in category_ids: + category = trans.sa_session.query( trans.model.Category ) \ + .get( trans.security.decode_id( category_id ) ) + rca = trans.app.model.RepositoryCategoryAssociation( repository, category ) + trans.sa_session.add( rca ) + flush_needed = True + if flush_needed: + trans.sa_session.flush() + message = "Repository <b>%s</b> has been created." % str( repository.name ) + return repository, message + +def create_repository_and_import_archive( trans, repository_archive_dict, import_results_tups ): + """ + Create a new repository in the tool shed and populate it with the contents of a gzip compressed tar archive that was exported + as part or all of the contents of a capsule. + """ + results_message = '' + name = repository_archive_dict.get( 'name', None ) + username = repository_archive_dict.get( 'owner', None ) + if name is None or username is None: + results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % ( str( name ), str( username )) + import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) ) + else: + if repository_archive_dict[ 'status' ] is None: + # The repository does not yet exist in this Tool Shed and the current user is authorized to import + # the current archive file. + type = repository_archive_dict.get( 'type', 'unrestricted' ) + description = repository_archive_dict.get( 'description', '' ) + long_description = repository_archive_dict.get( 'long_description', '' ) + # The owner entry in the repository_archive_dict is the public username of the user associated with + # the exported repository archive. + user = suc.get_user_by_username( trans.app, username ) + if user is None: + results_message += 'Import failed: repository owner <b>%s</b> does not have an account in this Tool Shed.' % str( username ) + import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) ) + else: + user_id = user.id + # The categories entry in the repository_archive_dict is a list of category names. If a name does not + # exist in the current Tool Shed, the category will not be created, so it will not be associated with + # the repository. + category_ids = [] + category_names = repository_archive_dict[ 'category_names' ] + for category_name in category_names: + category = suc.get_category_by_name( trans, category_name ) + if category is None: + results_message += 'This Tool Shed does not have the category <b>%s</b> so it will not be associated with this repository.' 
% \ + str( category_name ) + else: + category_ids.append( trans.security.encode_id( category.id ) ) + # Create the repository record in the database. + repository, create_message = create_repository( trans, + name, + type, + description, + long_description, + user_id=user_id, + category_ids=category_ids ) + if create_message: + results_message += create_message + # Populate the new repository with the contents of exported repository archive. + results_dict = import_util.import_repository_archive( trans, repository, repository_archive_dict ) + import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) ) + else: + # The repository either already exists in this Tool Shed or the current user is not authorized to create it. + results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % str( repository_archive_dict[ 'status' ] ) + import_results_tups.append( ( ( str( name ), str( username ) ), results_message ) ) + return import_results_tups + def validate_repository_name( name, user ): # Repository names must be unique for each user, must be at least four characters # in length and must contain only lower-case letters, numbers, and the '_' character. diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/index.mako --- a/templates/webapps/tool_shed/index.mako +++ b/templates/webapps/tool_shed/index.mako @@ -147,9 +147,9 @@ <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='repository', action='create_repository' )}">Create new repository</a></div> - ##<div class="toolTitle"> - ## <a target="galaxy_main" href="${h.url_for( controller='repository', action='import_capsule' )}">Import repository capsule</a> - ##</div> + <div class="toolTitle"> + <a target="galaxy_main" href="${h.url_for( controller='repository', action='upload_capsule' )}">Import repository capsule</a> + </div> %if can_review_repositories: <div class="toolSectionPad"></div><div class="toolSectionTitle"> diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/repository/export_repository.mako --- a/templates/webapps/tool_shed/repository/export_repository.mako +++ b/templates/webapps/tool_shed/repository/export_repository.mako @@ -23,7 +23,7 @@ ${container_javascripts()} </%def> -${render_tool_shed_repository_actions( repository )} +${render_tool_shed_repository_actions( repository, changeset_revision=changeset_revision )} %if message: ${render_msg( message, status )} diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/repository/import_capsule.mako --- /dev/null +++ b/templates/webapps/tool_shed/repository/import_capsule.mako @@ -0,0 +1,146 @@ +<%namespace file="/message.mako" import="render_msg" /> + +<%! + def inherit(context): + if context.get('use_panels'): + return '/webapps/tool_shed/base_panels.mako' + else: + return '/base.mako' +%> + +<%inherit file="${inherit(context)}"/> + +<%def name="stylesheets()"> + ${parent.stylesheets()} +</%def> + +<%def name="javascripts()"> + ${parent.javascripts()} +</%def> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="toolForm"> + <div class="toolFormBody"> + <div class="form-row"> + <div class="warningmessage"> + Importing may take a while, depending upon the contents of the capsule. + Wait until this page refreshes after clicking the <b>Import</b> button below. 
+ </div> + <div style="clear: both"></div> + </div> + </div> +</div> + +<div class="toolForm"> + <div class="toolFormTitle">Repository capsule information</div> + <div class="toolFormBody"> + <div class="form-row"> + <label>Date and time exported:</label> + ${export_info_dict.get( 'export_time', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Exported from Tool Shed:</label> + ${export_info_dict.get( 'tool_shed', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository name:</label> + ${export_info_dict.get( 'repository_name', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository owner:</label> + ${export_info_dict.get( 'repository_owner', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Changeset revision:</label> + ${export_info_dict.get( 'changeset_revision', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository dependencies included in capsule?:</label> + ${export_info_dict.get( 'export_repository_dependencies', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + </div> +</div> +<div class="toolForm"> + <div class="toolFormBody"> + <div class="form-row"> + <div class="warningmessage"> + <p> + Exported archives for each of the following repositories are included in the capsule. + </p> + <p> + The <b>Status</b> column will display an entry starting with the word <b>Exists</b> for those repositories + that already exist in this Tool Shed. These repositories will not be created, but the existing repository + will be used. Existing repositories that are deprecated or deleted must be manually altered appropriately. + </p> + <p> + If you are not an admin user in this Tool Shed and you are not a member of the <b>Intergalactic Utilities + Commission</b> defined for this Tool Shed, you will only be able to import repository archives whose + associated owner is you. The <b>Status</b> column for repository archive that you are not authorized to + import will display the entry <b>Not authorized to import</b>. Contact someone that is authorized to import + these repository archives in this Tool Shed if necessary. + </p> + <p> + Repositories that do not yet exist in this Tool Shed (and whose archives you are authorized to import) will + be created in the order defined by the following list. + </p> + </div> + <div style="clear: both"></div> + </div> + </div> +</div> +<div class="toolForm"> + <div class="toolFormTitle">Import capsule</div> + <form id="import_form" name="import_form" action="${h.url_for( controller='repository', action='import_capsule' )}" enctype="multipart/form-data" method="post"> + <div class="form-row"> + <input type="hidden" name="encoded_file_path" value="${encoded_file_path}" /> + </div> + <div class="form-row"> + <table class="grid"> + <tr> + <th bgcolor="#D8D8D8">Name</th> + <th bgcolor="#D8D8D8">Owner</th> + <th bgcolor="#D8D8D8">Changeset Revision</th> + <th bgcolor="#D8D8D8">Type</th> + <th bgcolor="#D8D8D8">Status</th> + </tr> + %for repository_status_info_dict in repository_status_info_dicts: + <tr> + <td>${ repository_status_info_dict[ 'name' ] | h }</td> + <td>${ repository_status_info_dict[ 'owner' ] | h }</td> + <td>${ repository_status_info_dict[ 'changeset_revision' ] | h }</td> + <td> + <% + # Get the label for the repository type. 
+ type = repository_status_info_dict[ 'type' ] + type_class = trans.app.repository_types_registry.get_class_by_label( type ) + type_label = type_class.label + %> + ${ type_label | h } + </td> + <td> + %if repository_status_info_dict[ 'status' ] is None: + + %else: + ${ repository_status_info_dict[ 'status' ] | h } + %endif + </td> + </tr> + %endfor + </table> + </div> + <div style="clear: both"></div> + <div class="form-row"> + <input type="submit" class="primary-button" name="import_capsule_button" value="Import"> + </div> + </form> + </div> +</div> diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/repository/import_capsule_results.mako --- /dev/null +++ b/templates/webapps/tool_shed/repository/import_capsule_results.mako @@ -0,0 +1,82 @@ +<%namespace file="/message.mako" import="render_msg" /> + +<%! + def inherit(context): + if context.get('use_panels'): + return '/webapps/tool_shed/base_panels.mako' + else: + return '/base.mako' +%> + +<%inherit file="${inherit(context)}"/> + +<%def name="stylesheets()"> + ${parent.stylesheets()} +</%def> + +<%def name="javascripts()"> + ${parent.javascripts()} +</%def> + +<br/><br/> +<ul class="manage-table-actions"> + <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories' )}">Browse repositories</a></li> +</ul> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="toolForm"> + <div class="toolFormTitle">Repository capsule information</div> + <div class="toolFormBody"> + <div class="form-row"> + <label>Date and time exported:</label> + ${export_info_dict.get( 'export_time', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Exported from Tool Shed:</label> + ${export_info_dict.get( 'tool_shed', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository name:</label> + ${export_info_dict.get( 'repository_name', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository owner:</label> + ${export_info_dict.get( 'repository_owner', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Changeset revision:</label> + ${export_info_dict.get( 'changeset_revision', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + <div class="form-row"> + <label>Repository dependencies included in capsule?:</label> + ${export_info_dict.get( 'export_repository_dependencies', 'unknown' ) | h} + </div> + <div style="clear: both"></div> + </div> +</div> + +<div class="toolForm"> + <div class="toolFormTitle">Results of attempt to import ${len( import_results_tups )} repositories contained in the capsule</div> + <div class="toolFormBody"> + <div class="form-row"> + <table class="grid"> + %for import_results_tup in import_results_tups: + <% + name_owner_tup, results_message = import_results_tup + name, owner = name_owner_tup + %> + <tr><td>Archive of repository <b>${name}</b> owned by <b>${owner}</b><br/>${results_message}</td></tr> + %endfor + </table> + <div style="clear: both"></div> + </div> + </div> +</div> diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/repository/upload.mako --- a/templates/webapps/tool_shed/repository/upload.mako +++ b/templates/webapps/tool_shed/repository/upload.mako @@ -41,7 +41,6 @@ ${render_tool_shed_repository_actions( repository=repository)} 
<div class="toolForm"> - <div class="toolFormTitle">Repository '${repository.name | h}'</div><div class="toolFormBody"><div class="form-row"><div class="warningmessage"> @@ -50,6 +49,12 @@ </div><div style="clear: both"></div></div> + </div> +</div> + +<div class="toolForm"> + <div class="toolFormTitle">Repository '${repository.name | h}'</div> + <div class="toolFormBody"><form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post"><div class="form-row"><label>File:</label> @@ -66,8 +71,8 @@ <div class="toolParamHelp" style="clear: both;"> Enter a url to upload your files. In addition to http and ftp urls, urls that point to mercurial repositories (urls that start with hg:// or hgs://) are allowed. This mechanism results in the tip revision of an external mercurial repository being added - to the tool shed repository as a single new changeset. The revision history of the originating external mercurial repository is - not uploaded to the tool shed repository. + to the Tool Shed repository as a single new changeset. The revision history of the originating external mercurial repository is + not uploaded to the Tool Shed repository. </div><div style="clear: both"></div></div> diff -r fd7ab1c8f6a5d498f76b1b1a520fbceb07706769 -r 5c59f2c4f770551ebf175bc7188c4a39af57cf5b templates/webapps/tool_shed/repository/upload_capsule.mako --- /dev/null +++ b/templates/webapps/tool_shed/repository/upload_capsule.mako @@ -0,0 +1,60 @@ +<%namespace file="/message.mako" import="render_msg" /> + +<%! + def inherit(context): + if context.get('use_panels'): + return '/webapps/tool_shed/base_panels.mako' + else: + return '/base.mako' +%> + +<%inherit file="${inherit(context)}"/> + +<%def name="stylesheets()"> + ${parent.stylesheets()} +</%def> + +<%def name="javascripts()"> + ${parent.javascripts()} +</%def> + +%if message: + ${render_msg( message, status )} +%endif + +<div class="toolForm"> + <div class="toolFormBody"> + <div class="form-row"> + <div class="warningmessage"> + Upload a single exported capsule file. Uploading may take a while, depending upon the size of the file. + Wait until the contents of the file are displayed in your browser after clicking the <b>Upload</b> button below. + </div> + <div style="clear: both"></div> + </div> + </div> +</div> + +<div class="toolForm"> + <div class="toolFormTitle">Upload a repository capsule</div> + <div class="toolFormBody"> + <form id="upload_capsule" name="upload_capsule" action="${h.url_for( controller='repository', action='upload_capsule' )}" enctype="multipart/form-data" method="post"> + <div class="form-row"> + <label>File:</label> + <div class="form-row-input"> + <input type="file" name="file_data"/> + </div> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <label>Url:</label> + <div class="form-row-input"> + <input name="url" type="textfield" value="${url | h}" size="40"/> + </div> + <div style="clear: both"></div> + </div> + <div class="form-row"> + <input type="submit" class="primary-button" name="upload_capsule_button" value="Upload"> + </div> + </form> + </div> +</div> Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.