1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/8ca6cfea1909/

Changeset: 8ca6cfea1909
User: greg
Date: 2013-08-03 01:30:03
Summary: Add the ability to export a specified repository revision from the tool shed to a gzip compressed archive saved locally. The ability to import this archive into the same or another tool shed is not yet implemented, but is coming soon. All of the repository's repository dependencies can optionally be exported into the same archive. The archive includes a manifest.xml file that contains the order in which the repositories in the archive must be imported. This feature is now available for any installable revision of a repository using the new "Export this revision" option in the Repository actions menu in the browser. It is also available via the Tool Shed API using a command like the following:

export.py --url http://testtoolshed.g2.bx.psu.edu --name chemicaltoolbox --owner bgruening --revision 4133dbf7ff4d --export_repository_dependencies True --download_dir /tmp

Affected #: 20 files

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/galaxy/webapps/tool_shed/api/repository_revisions.py --- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py +++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py @@ -6,6 +6,8 @@ from galaxy import util from galaxy.model.orm import and_, not_, select from galaxy.web.base.controller import BaseAPIController +from tool_shed.util import export_util +import tool_shed.util.shed_util_common as suc log = logging.getLogger( __name__ ) @@ -20,6 +22,58 @@ class RepositoryRevisionsController( BaseAPIController ): """RESTful controller for interactions with tool shed repository revisions.""" + @web.expose_api_anonymous + def export( self, trans, payload, **kwd ): + """ + POST /api/repository_revisions/export + Creates and saves a gzip compressed tar archive of a repository and optionally all of its repository dependencies. + + The following parameters are included in the payload. + :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed + :param name (required): the name of the Repository + :param owner (required): the owner of the Repository + :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository + :param export_repository_dependencies (optional): whether to export repository dependencies - defaults to False + :param download_dir (optional): the local directory to which to download the archive - defaults to /tmp + """ + tool_shed_url = payload.get( 'tool_shed_url', '' ) + if not tool_shed_url: + raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." ) + tool_shed_url = tool_shed_url.rstrip( '/' ) + name = payload.get( 'name', '' ) + if not name: + raise HTTPBadRequest( detail="Missing required parameter 'name'." ) + owner = payload.get( 'owner', '' ) + if not owner: + raise HTTPBadRequest( detail="Missing required parameter 'owner'." ) + changeset_revision = payload.get( 'changeset_revision', '' ) + if not changeset_revision: + raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." ) + export_repository_dependencies = payload.get( 'export_repository_dependencies', False ) + download_dir = payload.get( 'download_dir', '/tmp' ) + try: + # We'll currently support only gzip-compressed tar archives.
+ file_type = 'gz' + file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + export_repository_dependencies = util.string_as_bool( export_repository_dependencies ) + # Get the repository information. + repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + repository_id = trans.security.encode_id( repository.id ) + response = export_util.export_repository( trans, + tool_shed_url, + repository_id, + str( repository.name ), + changeset_revision, + file_type, + export_repository_dependencies, + api=True ) + return response + except Exception, e: + message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e ) + log.error( message, exc_info=True ) + trans.response.status = 500 + return message + @web.expose_api def index( self, trans, **kwd ): """ diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/galaxy/webapps/tool_shed/buildapp.py --- a/lib/galaxy/webapps/tool_shed/buildapp.py +++ b/lib/galaxy/webapps/tool_shed/buildapp.py @@ -86,6 +86,7 @@ parent_resources=dict( member_name='repository', collection_name='repositories' ) ) webapp.mapper.resource( 'repository_revision', 'repository_revisions', + member={ 'export' : 'POST' }, controller='repository_revisions', name_prefix='repository_revision_', path_prefix='/api', diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -18,6 +18,7 @@ from galaxy.model.orm import and_ import tool_shed.util.shed_util_common as suc from tool_shed.util import encoding_util +from tool_shed.util import export_util from tool_shed.util import metadata_util from tool_shed.util import readme_util from tool_shed.util import repository_dependency_util @@ -1064,25 +1065,101 @@ @web.expose def download( self, trans, repository_id, changeset_revision, file_type, **kwd ): - # Download an archive of the repository files compressed as zip, gz or bz2. + """Download an archive of the repository files compressed as zip, gz or bz2.""" + # FIXME: this will currently only download the repository tip, no matter which installable changeset_revision is being viewed. + # This should be enhanced to use the export method below, which accounts for the currently viewed changeset_revision. repository = suc.get_repository_in_tool_shed( trans, repository_id ) # Allow hgweb to handle the download.
This requires the tool shed # server account's .hgrc file to include the following setting: # [web] # allow_archive = bz2, gz, zip - if file_type == 'zip': - file_type_str = '%s.zip' % changeset_revision - elif file_type == 'bz2': - file_type_str = '%s.tar.bz2' % changeset_revision - elif file_type == 'gz': - file_type_str = '%s.tar.gz' % changeset_revision + file_type_str = suc.get_file_type_str( changeset_revision, file_type ) repository.times_downloaded += 1 trans.sa_session.add( repository ) trans.sa_session.flush() - download_url = '/repos/%s/%s/archive/%s' % ( repository.user.username, repository.name, file_type_str ) + download_url = suc.url_join( '/', 'repos', repository.user.username, repository.name, 'archive', file_type_str ) return trans.response.send_redirect( download_url ) @web.expose + def export( self, trans, repository_id, changeset_revision, **kwd ): + message = kwd.get( 'message', '' ) + status = kwd.get( 'status', 'done' ) + export_repository_dependencies = kwd.get( 'export_repository_dependencies', '' ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + if kwd.get( 'export_repository_button', False ): + # We'll currently support only gzip-compressed tar archives. + file_type = 'gz' + export_repository_dependencies = CheckboxField.is_checked( export_repository_dependencies ) + tool_shed_url = web.url_for( '/', qualified=True ) + repositories_archive, error_message = export_util.export_repository( trans, + tool_shed_url, + repository_id, + str( repository.name ), + changeset_revision, + file_type, + export_repository_dependencies ) + repositories_archive_filename = os.path.basename( repositories_archive.name ) + if error_message: + message = error_message + else: + trans.response.set_content_type( 'application/x-gzip' ) + trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % ( repositories_archive_filename ) + opened_archive = open( repositories_archive.name ) + # Make sure the file is removed from disk after the contents have been downloaded. + os.unlink( repositories_archive.name ) + return opened_archive + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + trans.security.encode_id( repository.id ), + changeset_revision ) + metadata = repository_metadata.metadata + # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) + if repository_dependencies: + # Only display repository dependencies if they exist. 
+ exclude = [ 'datatypes', 'invalid_repository_dependencies', 'invalid_tool_dependencies', 'invalid_tools', + 'readme_files', 'tool_dependencies', 'tools', 'tool_test_results', 'workflows', 'data_manager' ] + containers_dict = container_util.build_repository_containers_for_tool_shed( trans, + repository, + changeset_revision, + repository_dependencies, + repository_metadata, + exclude=exclude ) + export_repository_dependencies_check_box = CheckboxField( 'export_repository_dependencies', checked=True ) + else: + containers_dict = None + export_repository_dependencies_check_box = None + revision_label = suc.get_revision_label( trans, repository, changeset_revision ) + return trans.fill_template( "/webapps/tool_shed/repository/export_repository.mako", + changeset_revision=changeset_revision, + containers_dict=containers_dict, + export_repository_dependencies_check_box=export_repository_dependencies_check_box, + repository=repository, + repository_metadata=repository_metadata, + revision_label=revision_label, + metadata=metadata, + message=message, + status=status ) + + @web.expose + def export_via_api( self, trans, **kwd ): + """Return an exported gzip compressed repository archive file opened for reading.""" + encoded_repositories_archive_name = kwd.get( 'encoded_repositories_archive_name', None ) + if encoded_repositories_archive_name: + repositories_archive_name = encoding_util.tool_shed_decode( encoded_repositories_archive_name ) + opened_archive = open( repositories_archive_name ) + # Make sure the file is removed from disk after the contents have been downloaded. + os.unlink( repositories_archive_name ) + return opened_archive + return '' + + @web.expose def find_tools( self, trans, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -135,7 +135,7 @@ # Move some version of the uploaded file to the load_point within the repository hierarchy. if uploaded_file_filename in [ suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME ]: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. - altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name, unpopulate=False ) if altered: tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) shutil.move( tmp_filename, full_path ) @@ -277,7 +277,7 @@ uploaded_file_name = os.path.abspath( os.path.join( root, uploaded_file ) ) if os.path.split( uploaded_file_name )[ -1 ] == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. 
- altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name, unpopulate=False ) if altered: tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) shutil.move( tmp_filename, uploaded_file_name ) @@ -338,7 +338,7 @@ uploaded_file_name = os.path.join( full_path, filename ) if os.path.split( uploaded_file_name )[ -1 ] == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. - altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name, unpopulate=False ) if altered: tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) shutil.move( tmp_filename, uploaded_file_name ) diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/galaxy/webapps/tool_shed/util/container_util.py --- a/lib/galaxy/webapps/tool_shed/util/container_util.py +++ b/lib/galaxy/webapps/tool_shed/util/container_util.py @@ -710,8 +710,10 @@ lock.release() return containers_dict -def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ): +def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata, exclude=None ): """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed.""" + if exclude is None: + exclude = [] containers_dict = dict( datatypes=None, invalid_tools=None, readme_files=None, @@ -734,13 +736,13 @@ folder_id = 0 # Datatypes container. if metadata: - if 'datatypes' in metadata: + if 'datatypes' not in exclude and 'datatypes' in metadata: datatypes = metadata[ 'datatypes' ] folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes ) containers_dict[ 'datatypes' ] = datatypes_root_folder # Invalid repository dependencies container. if metadata: - if 'invalid_repository_dependencies' in metadata: + if 'invalid_repository_dependencies' not in exclude and 'invalid_repository_dependencies' in metadata: invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ] folder_id, invalid_repository_dependencies_root_folder = \ build_invalid_repository_dependencies_root_folder( trans, @@ -749,7 +751,7 @@ containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder # Invalid tool dependencies container. if metadata: - if 'invalid_tool_dependencies' in metadata: + if 'invalid_tool_dependencies' not in exclude and 'invalid_tool_dependencies' in metadata: invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ] folder_id, invalid_tool_dependencies_root_folder = \ build_invalid_tool_dependencies_root_folder( trans, @@ -758,7 +760,7 @@ containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder # Invalid tools container. 
if metadata: - if 'invalid_tools' in metadata: + if 'invalid_tools' not in exclude and 'invalid_tools' in metadata: invalid_tool_configs = metadata[ 'invalid_tools' ] folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans, folder_id, @@ -769,21 +771,22 @@ containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder # Readme files container. if metadata: - if 'readme_files' in metadata: + if 'readme_files' not in exclude and 'readme_files' in metadata: readme_files_dict = readme_util.build_readme_files_dict( metadata ) folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict ) containers_dict[ 'readme_files' ] = readme_files_root_folder - # Repository dependencies container. - folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans, - folder_id=folder_id, - repository_dependencies=repository_dependencies, - label='Repository dependencies', - installed=False ) - if repository_dependencies_root_folder: - containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder + if 'repository_dependencies' not in exclude: + # Repository dependencies container. + folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans, + folder_id=folder_id, + repository_dependencies=repository_dependencies, + label='Repository dependencies', + installed=False ) + if repository_dependencies_root_folder: + containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder # Tool dependencies container. if metadata: - if 'tool_dependencies' in metadata: + if 'tool_dependencies' not in exclude and 'tool_dependencies' in metadata: tool_dependencies = metadata[ 'tool_dependencies' ] if trans.webapp.name == 'tool_shed': if 'orphan_tool_dependencies' in metadata: @@ -797,7 +800,7 @@ containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder # Valid tools container. if metadata: - if 'tools' in metadata: + if 'tools' not in exclude and 'tools' in metadata: valid_tools = metadata[ 'tools' ] folder_id, valid_tools_root_folder = build_tools_folder( trans, folder_id, @@ -807,7 +810,7 @@ label='Valid tools' ) containers_dict[ 'valid_tools' ] = valid_tools_root_folder # Tool test results container. - if tool_test_results and len( tool_test_results ) > 1: + if 'tool_test_results' not in exclude and tool_test_results and len( tool_test_results ) > 1: # Only create and populate this folder if there are actual tool test results to display, since the display of the 'Test environment' # folder by itself can be misleading. We check for more than a single entry in the tool_test_results dictionary because it may have # only the "test_environment" entry, but we want at least 1 of "passed_tests", "failed_tests", "installation_errors", "missing_test_components" @@ -816,7 +819,7 @@ containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder # Workflows container. 
if metadata: - if 'workflows' in metadata: + if 'workflows' not in exclude and 'workflows' in metadata: workflows = metadata[ 'workflows' ] folder_id, workflows_root_folder = build_workflows_folder( trans=trans, folder_id=folder_id, @@ -827,7 +830,7 @@ containers_dict[ 'workflows' ] = workflows_root_folder # Valid Data Managers container if metadata: - if 'data_manager' in metadata: + if 'data_manager' not in exclude and 'data_manager' in metadata: data_managers = metadata['data_manager'].get( 'data_managers', None ) folder_id, data_managers_root_folder = build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" ) containers_dict[ 'valid_data_managers' ] = data_managers_root_folder diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -4,7 +4,6 @@ import tempfile import threading from galaxy import tools -from galaxy.util import asbool from galaxy.util import json from galaxy import util from galaxy import web @@ -118,34 +117,6 @@ installed_repositories.append( installed_repository ) return installed_repositories -def get_next_prior_install_required_dict_entry( prior_install_required_dict, processed_tsr_ids ): - """ - The order in which the prior_install_required_dict is processed is critical in order to ensure that the ultimate repository installation order is correctly - defined. This method determines the next key / value pair from the received prior_install_required_dict that should be processed. - """ - # Return the first key / value pair that is not yet processed and whose value is an empty list. - for key, value in prior_install_required_dict.items(): - if key in processed_tsr_ids: - continue - if not value: - return key - # Return the first key / value pair that is not yet processed and whose ids in value are all included in processed_tsr_ids. - for key, value in prior_install_required_dict.items(): - if key in processed_tsr_ids: - continue - all_contained = True - for required_repository_id in value: - if required_repository_id not in processed_tsr_ids: - all_contained = False - break - if all_contained: - return key - # Return the first key / value pair that is not yet processed. Hopefully this is all that is necessary at this point. - for key, value in prior_install_required_dict.items(): - if key in processed_tsr_ids: - continue - return key - def get_prior_install_required_dict( trans, tsr_ids, repo_info_dicts ): """ Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained in the received list of tsr_ids @@ -155,15 +126,15 @@ prior_install_required_dict = {} for tsr_id in tsr_ids: prior_install_required_dict[ tsr_id ] = [] - # inspect the repository dependencies for each repository about to be installed and populate the dictionary. + # Inspect the repository dependencies for each repository about to be installed and populate the dictionary. 
for repo_info_dict in repo_info_dicts: - repository, repository_dependencies = get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict ) + repository, repository_dependencies = suc.get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict ) if repository: encoded_repository_id = trans.security.encode_id( repository.id ) if encoded_repository_id in tsr_ids: # We've located the database table record for one of the repositories we're about to install, so find out if it has any repository # dependencies that require prior installation. - prior_install_ids = get_repository_ids_requiring_prior_install( trans, tsr_ids, repository_dependencies ) + prior_install_ids = suc.get_repository_ids_requiring_prior_import_or_install( trans, tsr_ids, repository_dependencies ) prior_install_required_dict[ encoded_repository_id ] = prior_install_ids return prior_install_required_dict @@ -287,38 +258,6 @@ return repo_info_dict, tool_panel_section_key return None, None -def get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict ): - """Return a tool_shed_repository record defined by the information in the received repo_info_dict.""" - repository_name = repo_info_dict.keys()[ 0 ] - repo_info_tuple = repo_info_dict[ repository_name ] - description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ - suc.get_repo_info_tuple_contents( repo_info_tuple ) - tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url ) - repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, repository_name, repository_owner, changeset_revision ) - return repository, repository_dependencies - -def get_repository_ids_requiring_prior_install( trans, tsr_ids, repository_dependencies ): - """ - Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received tsr_ids. If so, - then determine whether that required repository should be installed prior to it's dependent repository. Return a list of encoded repository - ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be installed prior to the dependent - repository associated with the received repository_dependencies. - """ - prior_install_ids = [] - if repository_dependencies: - for key, rd_tups in repository_dependencies.items(): - if key in [ 'description', 'root_key' ]: - continue - for rd_tup in rd_tups: - tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup ) - if asbool( prior_installation_required ): - repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision ) - if repository: - encoded_repository_id = trans.security.encode_id( repository.id ) - if encoded_repository_id in tsr_ids: - prior_install_ids.append( encoded_repository_id ) - return prior_install_ids - def get_tool_shed_repository_ids( as_string=False, **kwd ): tsrid = kwd.get( 'tool_shed_repository_id', None ) tsridslist = util.listify( kwd.get( 'tool_shed_repository_ids', None ) ) @@ -722,10 +661,10 @@ ordered_tool_panel_section_keys = [] # Create a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained in the received list of tsr_ids # and whose associated repository must be installed prior to the repository associated with the tsr_id key. 
- prior_install_required_dict = get_prior_install_required_dict( trans, tsr_ids, repo_info_dicts ) + prior_install_required_dict = suc.get_prior_import_or_install_required_dict( trans, tsr_ids, repo_info_dicts ) processed_tsr_ids = [] while len( processed_tsr_ids ) != len( prior_install_required_dict.keys() ): - tsr_id = get_next_prior_install_required_dict_entry( prior_install_required_dict, processed_tsr_ids ) + tsr_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_install_required_dict, processed_tsr_ids ) processed_tsr_ids.append( tsr_id ) # Create the ordered_tsr_ids, the ordered_repo_info_dicts and the ordered_tool_panel_section_keys lists. if tsr_id not in ordered_tsr_ids: diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -1,6 +1,6 @@ import os, sys, urllib, urllib2 -new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', 'lib' ) ] +new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ] new_path.extend( sys.path[ 1: ] ) sys.path = new_path @@ -45,10 +45,10 @@ print '--------' print r -def display( api_key, url, return_formatted=True ): +def display( url, api_key=None, return_formatted=True ): # Sends an API GET request and acts as a generic formatter for the JSON response. try: - r = get( api_key, url ) + r = get( url, api_key=api_key ) except urllib2.HTTPError, e: print e print e.read( 1024 ) # Only return the first 1K of errors. @@ -82,43 +82,44 @@ else: print 'response is unknown type: %s' % type( r ) -def get( api_key, url ): +def get( url, api_key=None ): # Do the actual GET. - url = make_url( api_key, url ) + url = make_url( url, api_key=api_key ) try: return simplejson.loads( urllib2.urlopen( url ).read() ) except simplejson.decoder.JSONDecodeError, e: print "URL did not return JSON data" sys.exit(1) -def make_url( api_key, url, args=None ): +def make_url( url, api_key=None, args=None ): # Adds the API Key to the URL if it's not already there. if args is None: args = [] argsep = '&' if '?' not in url: argsep = '?' - if '?key=' not in url and '&key=' not in url: - args.insert( 0, ( 'key', api_key ) ) + if api_key: + if '?key=' not in url and '&key=' not in url: + args.insert( 0, ( 'key', api_key ) ) return url + argsep + '&'.join( [ '='.join( t ) for t in args ] ) -def post( api_key, url, data ): +def post( url, data, api_key=None ): # Do the actual POST. - url = make_url( api_key, url ) + url = make_url( url, api_key=api_key ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) ) return simplejson.loads( urllib2.urlopen( req ).read() ) -def put( api_key, url, data ): +def put( url, data, api_key=None ): # Do the actual PUT. - url = make_url( api_key, url ) + url = make_url( url, api_key=api_key ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) req.get_method = lambda: 'PUT' return simplejson.loads( urllib2.urlopen( req ).read() ) -def submit( api_key, url, data, return_formatted=True ): +def submit( url, data, api_key=None, return_formatted=True ): # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. 
try: - r = post( api_key, url, data ) + r = post( url, data, api_key=api_key ) except urllib2.HTTPError, e: if return_formatted: print e diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/scripts/api/export.py --- /dev/null +++ b/lib/tool_shed/scripts/api/export.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +""" +Export a specified repository revision and optionally all of its defined repository dependencies from the tool shed into a compressed archive. + +Here is a working example of how to use this script to export a repository from the tool shed. +./export.py --url http://testtoolshed.g2.bx.psu.edu --name chemicaltoolbox --owner bgruening --revision 4133dbf7ff4d --export_repository_dependencies True --download_dir /tmp +""" + +import os +import sys +import argparse
+import urllib2 +sys.path.insert( 0, os.path.dirname( __file__ ) ) +from common import display +from common import submit + +CHUNK_SIZE = 2**20 # 1MB + +def get_file_type_str( changeset_revision, file_type ): + if file_type == 'zip': + file_type_str = '%s.zip' % changeset_revision + elif file_type == 'bz2': + file_type_str = '%s.tar.bz2' % changeset_revision + elif file_type == 'gz': + file_type_str = '%s.tar.gz' % changeset_revision + else: + file_type_str = '' + return file_type_str + +def string_as_bool( string ): + if str( string ).lower() in ( 'true', 'yes', 'on' ): + return True + else: + return False + +def main( options ): + """Collect all user data and export the repository via the Tool Shed API.""" + base_tool_shed_url = options.tool_shed_url.rstrip( '/' ) + repositories_url = '%s/api/repositories' % base_tool_shed_url + data = {} + data[ 'tool_shed_url' ] = base_tool_shed_url + data[ 'name' ] = options.name + data[ 'owner' ] = options.owner + data[ 'changeset_revision' ] = options.changeset_revision + data[ 'export_repository_dependencies' ] = options.export_repository_dependencies + repository_id = None + repositories = display( repositories_url, api_key=None, return_formatted=False ) + for repository in repositories: + name = str( repository[ 'name' ] ) + owner = str( repository[ 'owner' ] ) + if name == options.name and owner == options.owner: + repository_id = repository[ 'id' ] + break + if repository_id: + # We'll currently support only gzip-compressed tar archives.
+ file_type = 'gz' + file_type_str = get_file_type_str( options.changeset_revision, file_type ) + url = '%s%s' % ( base_tool_shed_url, '/api/repository_revisions/%s/export' % str( repository_id ) ) + export_dict = submit( url, data, return_formatted=False ) + error_messages = export_dict[ 'error_messages' ] + if error_messages: + print "Error attempting to export revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner, ":\n", error_messages + else: + if string_as_bool( options.export_repository_dependencies ): + repositories_archive_filename = 'exported-with-dependencies-%s-%s' % ( name, file_type_str ) + else: + repositories_archive_filename = 'exported-%s-%s' % ( name, file_type_str ) + download_url = export_dict[ 'download_url' ] + download_dir = os.path.abspath( options.download_dir ) + file_path = os.path.join( download_dir, repositories_archive_filename ) + src = None + dst = None + try: + src = urllib2.urlopen( download_url ) + dst = open( file_path, 'wb' ) + while True: + chunk = src.read( CHUNK_SIZE ) + if chunk: + dst.write( chunk ) + else: + break + except: + raise + finally: + if src: + src.close() + if dst: + dst.close() + print "Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner + print "to location ", file_path + else: + print "Invalid tool_shed / name / owner." + +if __name__ == '__main__': + parser = argparse.ArgumentParser( description='Export a specified repository revision from the tool shed via the Tool Shed API.' ) + parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" ) + parser.add_argument( "-n", "--name", required=True, help="Repository name." ) + parser.add_argument( "-o", "--owner", required=True, help="Repository owner." ) + parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository changeset revision." ) + parser.add_argument( "-e", "--export_repository_dependencies", dest="export_repository_dependencies", required=False, default='False', help="Export repository dependencies." ) + parser.add_argument( "-d", "--download_dir", dest="download_dir", required=False, default='/tmp', help="Download directory." ) + options = parser.parse_args() + main( options ) diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -9,6 +9,7 @@ from galaxy import util from galaxy.datatypes import checkers from galaxy.util import json +from galaxy.util.odict import odict from galaxy.web import url_for import tool_shed.util.shed_util_common as suc from tool_shed.util import tool_util @@ -228,7 +229,12 @@ gzipped_file.close() shutil.move( uncompressed, uploaded_file_name ) -def handle_repository_dependencies_definition( trans, repository_dependencies_config ): +def handle_repository_dependencies_definition( trans, repository_dependencies_config, unpopulate=False ): + """ + Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag. Populating will occur when a + dependency definition file is being uploaded to the repository, while unpopulating will occur when the repository is being + exported. + """ altered = False # Make sure we're looking at a valid repository_dependencies.xml file.
tree, error_message = xml_util.parse_xml( repository_dependencies_config ) @@ -239,33 +245,45 @@ for index, elem in enumerate( root ): if elem.tag == 'repository': # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" /> - populated, elem, error_message = handle_repository_dependency_elem( trans, elem ) + revised, elem, error_message = handle_repository_dependency_elem( trans, elem, unpopulate=unpopulate ) if error_message: exception_message = 'The repository_dependencies.xml file contains an invalid <repository> tag. %s' % error_message raise Exception( exception_message ) - if populated: + if revised: root[ index ] = elem if not altered: altered = True return altered, root return False, None -def handle_repository_dependency_elem( trans, elem ): +def handle_repository_dependency_elem( trans, elem, unpopulate=False ): # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" /> error_message = '' name = elem.get( 'name' ) owner = elem.get( 'owner' ) + # The name and owner attributes are always required, so if either are missing, return the error message. if not name or not owner: error_message = handle_missing_repository_attribute( elem ) return False, elem, error_message - populated = False + revised = False toolshed = elem.get( 'toolshed' ) + changeset_revision = elem.get( 'changeset_revision' ) + if unpopulate: + # We're exporting the repository, so eliminate all toolshed and changeset_revision attributes from the <repository> tag. + if toolshed or changeset_revision: + attributes = odict() + attributes[ 'name' ] = name + attributes[ 'owner' ] = owner + attributes[ 'prior_installation_required' ] = elem.get( 'prior_installation_required', 'False' ) + elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=None ) + revised = True + return revised, elem, error_message + # From here on we're populating the toolshed and changeset_revisions if necessary. if not toolshed: # Default the setting to the current tool shed. toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' ) elem.attrib[ 'toolshed' ] = toolshed - populated = True - changeset_revision = elem.get( 'changeset_revision' ) + revised = True if not changeset_revision: # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository. # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the @@ -277,12 +295,12 @@ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo ) if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH: elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision - populated = True + revised = True else: error_message = 'Unable to locate repository with name %s and owner %s. ' % ( str( name ), str( owner ) ) - return populated, elem, error_message + return revised, elem, error_message -def handle_tool_dependencies_definition( trans, tool_dependencies_config ): +def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ): altered = False # Make sure we're looking at a valid tool_dependencies.xml file. 
tree, error_message = xml_util.parse_xml( tool_dependencies_config ) @@ -297,11 +315,11 @@ for package_index, package_elem in enumerate( root_elem ): if package_elem.tag == 'repository': # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" /> - populated, repository_elem, error_message = handle_repository_dependency_elem( trans, package_elem ) + revised, repository_elem, error_message = handle_repository_dependency_elem( trans, package_elem, unpopulate=unpopulate ) if error_message: exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message raise Exception( exception_message ) - if populated: + if revised: root_elem[ package_index ] = repository_elem package_altered = True if not altered: @@ -318,11 +336,11 @@ # </repository> # </action> for repo_index, repo_elem in enumerate( action_elem ): - populated, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem ) + revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) if error_message: exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message raise Exception( exception_message ) - if populated: + if revised: action_elem[ repo_index ] = repository_elem package_altered = True if not altered: diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/util/export_util.py --- /dev/null +++ b/lib/tool_shed/util/export_util.py @@ -0,0 +1,255 @@ +import logging +import os +import shutil +import tarfile +import tempfile +import threading +import tool_shed.util.shed_util_common as suc +from galaxy import eggs +from galaxy import web +from galaxy.util.odict import odict +from tool_shed.util import commit_util +from tool_shed.util import common_install_util +from tool_shed.util import encoding_util +from tool_shed.util import repository_dependency_util +from tool_shed.util import xml_util + +eggs.require( 'mercurial' ) + +import mercurial.util +from mercurial import commands +from mercurial import hg +from mercurial import patch +from mercurial import ui + +log = logging.getLogger( __name__ ) + + +class ExportedRepositoryRegistry( object ): + + def __init__( self ): + self.exported_repository_elems = [] + +def archive_repository_revision( trans, ui, repository, archive_dir, changeset_revision ): + '''Create an un-versioned archive of a repository.''' + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) + options_dict = suc.get_mercurial_default_options_dict( 'archive' ) + options_dict[ 'rev' ] = changeset_revision + error_message = '' + return_code = None + try: + return_code = commands.archive( ui, repo, archive_dir, **options_dict ) + except Exception, e: + error_message = "Error attempting to archive revision <b>%s</b> of repository %s: %s\nReturn code: %s\n" % \ + ( str( changeset_revision ), str( repository.name ), str( e ), str( return_code ) ) + log.exception( error_message ) + return return_code, error_message + +def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, export_repository_dependencies, api=False ): + file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + tmp_archive_dir = tempfile.mkdtemp( prefix="tmp-toolshed-arcdir" ) + if export_repository_dependencies: + repositories_archive_filename = os.path.join( tmp_archive_dir, 
'exported-with-dependencies-%s-%s' % ( repository_name, file_type_str ) ) + repo_info_dicts = get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision ) + repository_ids = get_repository_ids( trans, repo_info_dicts ) + ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = order_components_for_import( trans, repository_ids, repo_info_dicts ) + else: + ordered_repository_ids = [] + ordered_repositories = [] + ordered_changeset_revisions = [] + repositories_archive_filename = os.path.join( tmp_archive_dir, 'exported-%s-%s' % ( repository_name, file_type_str ) ) + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + if repository: + repository_metadata = suc.get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision ) + if repository_metadata: + ordered_repository_ids = [ repository_id ] + ordered_repositories = [ repository ] + ordered_changeset_revisions = [ repository_metadata.changeset_revision ] + repositories_archive = None + error_messages = '' + lock = threading.Lock() + lock.acquire( True ) + try: + repositories_archive = tarfile.open( repositories_archive_filename, "w:%s" % file_type ) + exported_repository_registry = ExportedRepositoryRegistry() + for index, repository_id in enumerate( ordered_repository_ids ): + work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-export-er" ) + ordered_repository = ordered_repositories[ index ] + ordered_changeset_revision = ordered_changeset_revisions[ index ] + repository_archive, error_message = generate_repository_archive( trans, work_dir, tool_shed_url, ordered_repository, ordered_changeset_revision, file_type ) + if error_message: + error_messages = '%s %s' % ( error_messages, error_message ) + else: + archive_name = str( os.path.basename( repository_archive.name ) ) + repositories_archive.add( repository_archive.name, arcname=archive_name ) + attributes, sub_elements = get_repository_attributes_and_sub_elements( ordered_repository, archive_name ) + elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements ) + exported_repository_registry.exported_repository_elems.append( elem ) + shutil.rmtree( work_dir ) + # Write the manifest, which must preserve the order in which the repositories should be imported. 
+ tmp_xml_file = xml_util.create_and_write_tmp_file( exported_repository_registry.exported_repository_elems, use_indent=True ) + repositories_archive.add( tmp_xml_file, arcname='manifest.xml' ) + except Exception, e: + log.exception( str( e ) ) + finally: + lock.release() + repositories_archive.close() + if api: + encoded_repositories_archive_name = encoding_util.tool_shed_encode( repositories_archive_filename ) + download_url = suc.url_join( web.url_for( '/', qualified=True ), + 'repository/export_via_api?encoded_repositories_archive_name=%s' % encoded_repositories_archive_name ) + return dict( download_url=download_url, error_messages=error_messages ) + return repositories_archive, error_messages + +def generate_repository_archive( trans, work_dir, tool_shed_url, repository, changeset_revision, file_type ): + file_type_str = suc.get_file_type_str( changeset_revision, file_type ) + file_name = '%s-%s' % ( repository.name, file_type_str ) + return_code, error_message = archive_repository_revision( trans, ui, repository, work_dir, changeset_revision ) + if return_code: + return None, error_message + repository_archive_name = os.path.join( work_dir, file_name ) + # Create a compressed tar archive that will contain only valid files and possibly altered dependency definition files. + repository_archive = tarfile.open( repository_archive_name, "w:%s" % file_type ) + for root, dirs, files in os.walk( work_dir ): + if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: + for dir in dirs: + if dir in commit_util.UNDESIRABLE_DIRS: + dirs.remove( dir ) + for name in files: + name = str( name ) + if str( name ) in commit_util.UNDESIRABLE_FILES: + continue + full_path = os.path.join( root, name ) + relative_path = full_path.replace( work_dir, '' ).lstrip( '/' ) + # See if we have repository dependencies defined. + if name == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: + # Eliminate the toolshed and changeset_revision attributes from all <repository> tags. + altered, root_elem = commit_util.handle_repository_dependencies_definition( trans, full_path, unpopulate=True ) + if altered: + tmp_filename = xml_util.create_and_write_tmp_file( root_elem, use_indent=True ) + shutil.move( tmp_filename, full_path ) + elif name == suc.TOOL_DEPENDENCY_DEFINITION_FILENAME: + # Eliminate the toolshed and changeset_revision attributes from all <repository> tags. + altered, root_elem = commit_util.handle_tool_dependencies_definition( trans, full_path, unpopulate=True ) + if altered: + tmp_filename = xml_util.create_and_write_tmp_file( root_elem, use_indent=True ) + shutil.move( tmp_filename, full_path ) + repository_archive.add( full_path, arcname=relative_path ) + repository_archive.close() + return repository_archive, error_message + +def get_components_from_repo_info_dict( trans, repo_info_dict ): + """ + Return the repository and the associated latest installable changeset_revision (including updates) for the repository defined by the received + repo_info_dict. + """ + for repository_name, repo_info_tup in repo_info_dict.items(): + # There should only be one entry in the received repo_info_dict.
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tup ) + repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner ) + repository_metadata = suc.get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision ) + if repository_metadata: + return repository, repository_metadata.changeset_revision + return None, None + +def get_repo_info_dict_for_import( encoded_repository_id, encoded_repository_ids, repo_info_dicts ): + """ + The received encoded_repository_ids and repo_info_dicts are lists that contain associated elements at each location in the list. This method will return the element + from repo_info_dicts associated with the received encoded_repository_id by determining its location in the received encoded_repository_ids list. + """ + for index, repository_id in enumerate( encoded_repository_ids ): + if repository_id == encoded_repository_id: + repo_info_dict = repo_info_dicts[ index ] + return repo_info_dict + return None + +def get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision ): + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) + # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) + repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) + ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) + repo_info_dict = {} + # Cast unicode to string. + repo_info_dict[ str( repository.name ) ] = ( str( repository.description ), + suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ), + str( changeset_revision ), + str( ctx.rev() ), + str( repository.user.username ), + repository_dependencies, + None ) + all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( trans, tool_shed_url, [ repo_info_dict ] ) + all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] ) + return all_repo_info_dicts + +def get_repository_attributes_and_sub_elements( repository, archive_name ): + """ + Get the information about a repository to create and populate an XML tag set. The generated attributes will be contained within the <repository> + tag, while the sub_elements will be tag sets contained within the <repository> tag set. + """ + attributes = odict() + sub_elements = odict() + attributes[ 'name' ] = str( repository.name ) + attributes[ 'type' ] = str( repository.type ) + # We have to associate the public username since the user_id will be different between tool sheds.
+ attributes[ 'username' ] = str( repository.user.username ) + sub_elements[ 'description' ] = str( repository.description ) + sub_elements[ 'long_description' ] = str( repository.long_description ) + sub_elements[ 'archive' ] = archive_name + return attributes, sub_elements + +def get_repository_ids( trans, repo_info_dicts ): + repository_ids = [] + for repo_info_dict in repo_info_dicts: + for repository_name, repo_info_tup in repo_info_dict.items(): + description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \ + suc.get_repo_info_tuple_contents( repo_info_tup ) + repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner ) + repository_ids.append( trans.security.encode_id( repository.id ) ) + return repository_ids + +def order_components_for_import( trans, repository_ids, repo_info_dicts ): + """ + Some repositories may have repository dependencies that must be imported and have metadata set on them before the dependent repository is imported. This method + will inspect the list of repositories about to be exported and make sure to order them appropriately for proper import. For each repository about to be exported, + if required repositories are not contained in the list of repositories about to be exported, then they are not considered. Repository dependency definitions that + contain circular dependencies should not result in an infinite loop, but in that case a correct import order cannot be guaranteed for one or more of the repositories + that require prior import. + """ + ordered_repository_ids = [] + ordered_repositories = [] + ordered_changeset_revisions = [] + # Create a dictionary whose keys are the received repository_ids and whose values are a list of repository_ids, each of which is contained in the received list of + # repository_ids and whose associated repository must be imported prior to the repository associated with the repository_id key. + prior_import_required_dict = suc.get_prior_import_or_install_required_dict( trans, repository_ids, repo_info_dicts ) + processed_repository_ids = [] + while len( processed_repository_ids ) != len( prior_import_required_dict.keys() ): + repository_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_import_required_dict, processed_repository_ids ) + processed_repository_ids.append( repository_id ) + if repository_id not in ordered_repository_ids: + prior_import_required_ids = prior_import_required_dict[ repository_id ] + for prior_import_required_id in prior_import_required_ids: + if prior_import_required_id not in ordered_repository_ids: + # Import the associated repository dependency first.
+ prior_repo_info_dict = get_repo_info_dict_for_import( prior_import_required_id, repository_ids, repo_info_dicts ) + prior_repository, prior_import_changeset_revision = get_components_from_repo_info_dict( trans, prior_repo_info_dict ) + if prior_repository and prior_import_changeset_revision: + ordered_repository_ids.append( prior_import_required_id ) + ordered_repositories.append( prior_repository ) + ordered_changeset_revisions.append( prior_import_changeset_revision ) + repo_info_dict = get_repo_info_dict_for_import( repository_id, repository_ids, repo_info_dicts ) + repository, changeset_revision = get_components_from_repo_info_dict( trans, repo_info_dict ) + if repository and changeset_revision: + ordered_repository_ids.append( repository_id ) + ordered_repositories.append( repository ) + ordered_changeset_revisions.append( changeset_revision ) + return ordered_repository_ids, ordered_repositories, ordered_changeset_revisions diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -1614,7 +1614,6 @@ ancestor_changeset_revision = None ancestor_metadata_dict = None invalid_file_tups = [] - home_dir = os.getcwd() for changeset in repository.get_changesets_for_setting_metadata( trans.app ): work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ramorits" ) current_changeset_revision = str( repo.changectx( changeset ) ) diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -7,6 +7,7 @@ from time import gmtime from time import strftime from galaxy import util +from galaxy.util import asbool from galaxy.util import json from galaxy.util import unicodify from galaxy.web import url_for @@ -22,7 +23,10 @@ import pkg_resources pkg_resources.require( 'mercurial' ) -from mercurial import hg, ui, commands +from mercurial import cmdutil +from mercurial import commands +from mercurial import hg +from mercurial import ui eggs.require( 'markupsafe' ) import markupsafe @@ -530,6 +534,20 @@ return manifest_ctx, ctx_file return None, None +def get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision ): + encoded_repository_id = trans.security.encode_id( repository.id ) + repository_metadata = get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision ) + if repository_metadata: + return repository_metadata + # The installable changeset_revision may have been changed because it was "moved ahead" in the repository changelog. 
+ repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + updated_changeset_revision = get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision=changeset_revision ) + if updated_changeset_revision: + repository_metadata = get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, updated_changeset_revision ) + if repository_metadata: + return repository_metadata + return None + def get_file_context_from_ctx( ctx, filename ): """Return the mercurial file context for a specified file.""" # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories @@ -552,6 +570,17 @@ return 'DELETED' return None +def get_file_type_str( changeset_revision, file_type ): + if file_type == 'zip': + file_type_str = '%s.zip' % changeset_revision + elif file_type == 'bz2': + file_type_str = '%s.tar.bz2' % changeset_revision + elif file_type == 'gz': + file_type_str = '%s.tar.gz' % changeset_revision + else: + file_type_str = '' + return file_type_str + def get_ids_of_tool_shed_repositories_being_installed( trans, as_string=False ): installing_repository_ids = [] new_status = trans.model.ToolShedRepository.installation_status.NEW @@ -594,6 +623,18 @@ return changeset_revisions[ -1 ] return INITIAL_CHANGELOG_HASH +def get_mercurial_default_options_dict( command, command_table=None, **kwd ): + '''Borrowed from repoman - get default parameters for a mercurial command.''' + if command_table is None: + command_table = commands.table + possible = cmdutil.findpossible( command, command_table ) + if len( possible ) != 1: + raise Exception, 'unable to find mercurial command "%s"' % command + default_options_dict = dict( ( r[ 1 ].replace( '-', '_' ), r[ 2 ] ) for r in possible[ possible.keys()[ 0 ] ][ 1 ][ 1 ] ) + for option in kwd: + default_options_dict[ option ] = kwd[ option ] + return default_options_dict + def get_named_tmpfile_from_ctx( ctx, filename, dir ): """Return a named temporary file created from a specified file with a given name included in a repository changeset revision.""" filename = strip_path( filename ) @@ -638,6 +679,36 @@ found_after_changeset_revision = True return None +def get_next_prior_import_or_install_required_dict_entry( prior_required_dict, processed_tsr_ids ): + """ + This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies + are being installed. The order in which the prior_required_dict is processed is critical in order to ensure that the ultimate repository import + or installation order is correctly defined. This method determines the next key / value pair from the received prior_required_dict that should + be processed. + """ + # Return the first key / value pair that is not yet processed and whose value is an empty list. + for key, value in prior_required_dict.items(): + if key in processed_tsr_ids: + continue + if not value: + return key + # Return the first key / value pair that is not yet processed and whose ids in value are all included in processed_tsr_ids. + for key, value in prior_required_dict.items(): + if key in processed_tsr_ids: + continue + all_contained = True + for required_repository_id in value: + if required_repository_id not in processed_tsr_ids: + all_contained = False + break + if all_contained: + return key + # Return the first key / value pair that is not yet processed. Hopefully this is all that is necessary at this point.
 def get_named_tmpfile_from_ctx( ctx, filename, dir ):
 """Return a named temporary file created from a specified file with a given name included in a repository changeset revision."""
 filename = strip_path( filename )
@@ -638,6 +679,36 @@
 found_after_changeset_revision = True
 return None
+def get_next_prior_import_or_install_required_dict_entry( prior_required_dict, processed_tsr_ids ):
+ """
+ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
+ are being installed. The order in which the prior_required_dict is processed is critical in order to ensure that the ultimate repository import
+ or installation order is correctly defined. This method determines the next key / value pair from the received prior_required_dict that should
+ be processed.
+ """
+ # Return the first key / value pair that is not yet processed and whose value is an empty list.
+ for key, value in prior_required_dict.items():
+ if key in processed_tsr_ids:
+ continue
+ if not value:
+ return key
+ # Return the first key / value pair that is not yet processed and whose ids in value are all included in processed_tsr_ids.
+ for key, value in prior_required_dict.items():
+ if key in processed_tsr_ids:
+ continue
+ all_contained = True
+ for required_repository_id in value:
+ if required_repository_id not in processed_tsr_ids:
+ all_contained = False
+ break
+ if all_contained:
+ return key
+ # Return the first key / value pair that is not yet processed. Hopefully this is all that is necessary at this point.
+ for key, value in prior_required_dict.items():
+ if key in processed_tsr_ids:
+ continue
+ return key
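Because these selection rules determine the final import / installation order, a small worked example may help. Assuming three hypothetical encoded repository ids where 'b' must precede 'a', and both must precede 'c', repeated calls drain the dictionary in dependency-safe order:

    import tool_shed.util.shed_util_common as suc

    # Hypothetical encoded ids; values list the ids that must come first.
    prior_required_dict = { 'a': [ 'b' ], 'b': [], 'c': [ 'a', 'b' ] }
    processed_tsr_ids = []
    while len( processed_tsr_ids ) < len( prior_required_dict ):
        next_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_required_dict, processed_tsr_ids )
        processed_tsr_ids.append( next_id )
    # processed_tsr_ids is now [ 'b', 'a', 'c' ]: 'b' first (no prior requirements),
    # then 'a' (its requirement 'b' is processed), and finally 'c'.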
 def get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ):
 """
 Return a tool shed repository database record defined by the combination of tool shed, repository name, repository owner and changeset_revision
@@ -703,6 +774,28 @@
 else:
 previous_changeset_revision = changeset_revision
+def get_prior_import_or_install_required_dict( trans, tsr_ids, repo_info_dicts ):
+ """
+ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
+ are being installed. Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained
+ in the received list of tsr_ids and whose associated repository must be imported or installed prior to the repository associated with the tsr_id key.
+ """
+ # Initialize the dictionary.
+ prior_import_or_install_required_dict = {}
+ for tsr_id in tsr_ids:
+ prior_import_or_install_required_dict[ tsr_id ] = []
+ # Inspect the repository dependencies for each repository about to be installed and populate the dictionary.
+ for repo_info_dict in repo_info_dicts:
+ repository, repository_dependencies = get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict )
+ if repository:
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ if encoded_repository_id in tsr_ids:
+ # We've located the database table record for one of the repositories we're about to install, so find out if it has any repository
+ # dependencies that require prior installation.
+ prior_import_or_install_ids = get_repository_ids_requiring_prior_import_or_install( trans, tsr_ids, repository_dependencies )
+ prior_import_or_install_required_dict[ encoded_repository_id ] = prior_import_or_install_ids
+ return prior_import_or_install_required_dict

 def get_repo_info_tuple_contents( repo_info_tuple ):
 """Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced."""
 if len( repo_info_tuple ) == 6:
@@ -712,6 +805,20 @@
 description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
 return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
+def get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict ):
+ """Return a tool_shed_repository or repository record defined by the information in the received repo_info_dict."""
+ repository_name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ repository_name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ get_repo_info_tuple_contents( repo_info_tuple )
+ if trans.webapp.name == 'galaxy':
+ tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ repository = get_repository_for_dependency_relationship( trans.app, tool_shed, repository_name, repository_owner, changeset_revision )
+ else:
+ # We're in the tool shed.
+ repository = get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ return repository, repository_dependencies
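For reference, a repo_info_dict as consumed above is keyed by repository name, and its value is the repo_info_tuple unpacked by get_repo_info_tuple_contents; the rd_tups nested inside repository_dependencies are the five-element tuples handled by parse_repository_dependency_tuple. A sketch of the shapes involved -- every value below is invented for this example:

    # Illustrative shapes only; field values are not taken from the commit.
    repository_dependencies = {
        'root_key': 'chemicaltoolbox_bgruening_4133dbf7ff4d',
        'description': None,
        'chemicaltoolbox_bgruening_4133dbf7ff4d': [
            # [ tool_shed, name, owner, changeset_revision, prior_installation_required ]
            [ 'http://testtoolshed.g2.bx.psu.edu', 'some_required_repo', 'bgruening', '0123456789ab', 'True' ],
        ],
    }
    repo_info_dict = {
        'chemicaltoolbox': ( 'A description of the repository.',                                # description
                             'http://testtoolshed.g2.bx.psu.edu/repos/bgruening/chemicaltoolbox',  # repository_clone_url
                             '4133dbf7ff4d',                                                    # changeset_revision
                             '7',                                                               # ctx_rev
                             'bgruening',                                                       # repository_owner
                             repository_dependencies,                                           # repository_dependencies
                             {} ),                                                              # tool_dependencies
    }
    repository_name = repo_info_dict.keys()[ 0 ]
    repo_info_tuple = repo_info_dict[ repository_name ]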
 def get_repository_by_id( trans, id ):
 """Get a repository from the database via id."""
 if trans.webapp.name == 'galaxy':
@@ -745,7 +852,7 @@
 return None
 def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
- """Return a tool shed repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
+ """Return an installed tool_shed_repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
 # This method is used only in Galaxy, not the tool shed.
 if tool_shed.endswith( '/' ):
 tool_shed = tool_shed.rstrip( '/' )
@@ -821,6 +928,32 @@
 # This should never be reached - raise an exception?
 return v, None
+def get_repository_ids_requiring_prior_import_or_install( trans, tsr_ids, repository_dependencies ):
+ """
+ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
+ are being installed. Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received
+ tsr_ids. If so, then determine whether that required repository should be imported / installed prior to its dependent repository. Return a list
+ of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be imported / installed
+ prior to the dependent repository associated with the received repository_dependencies.
+ """
+ prior_tsr_ids = []
+ if repository_dependencies:
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'description', 'root_key' ]:
+ continue
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = parse_repository_dependency_tuple( rd_tup )
+ if asbool( prior_installation_required ):
+ if trans.webapp.name == 'galaxy':
+ repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
+ else:
+ repository = get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository:
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ if encoded_repository_id in tsr_ids:
+ prior_tsr_ids.append( encoded_repository_id )
+ return prior_tsr_ids

 def get_repository_in_tool_shed( trans, id ):
 """Get a repository on the tool shed side from the database via id."""
 return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -2,6 +2,7 @@
 import os
 import sys
 import tempfile
+from galaxy.util import listify
 from xml.etree import ElementTree as XmlET
 import xml.etree.ElementTree
@@ -30,8 +31,10 @@
 self.data( data )
 self.end( XmlET.Comment )
-def create_and_write_tmp_file( elem ):
- tmp_str = xml_to_string( elem )
+def create_and_write_tmp_file( elems, use_indent=False ):
+ tmp_str = ''
+ for elem in listify( elems ):
+ tmp_str += xml_to_string( elem, use_indent=use_indent )
 fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cawrf" )
 tmp_filename = fh.name
 fh.close()
@@ -41,6 +44,28 @@
 fh.close()
 return tmp_filename
+def create_element( tag, attributes=None, sub_elements=None ):
+ """
+ Create a new element whose tag is the value of the received tag, whose attributes are all key / value pairs in the received attributes,
+ and whose sub-elements are built from the key / value pairs in the received sub_elements.
+ """
+ if tag:
+ elem = XmlET.Element( tag )
+ if attributes:
+ # The received attributes is an odict to preserve ordering.
+ for k, v in attributes.items():
+ elem.set( k, v )
+ if sub_elements:
+ # The received sub_elements is an odict as well. These handle information that tends to be long text, including paragraphs (e.g.,
+ # description and long_description).
+ for k, v in sub_elements.items():
+ # Don't include fields that are blank.
+ if v:
+ sub_elem = XmlET.SubElement( elem, k )
+ sub_elem.text = v
+ return elem
+ return None
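A short sketch of how create_element and xml_to_string can be combined; the odict preserves attribute order, and blank sub-element values are skipped. The 'repository' tag and its fields are illustrative here, not necessarily the schema of the exported manifest.xml:

    from galaxy.util.odict import odict
    from tool_shed.util import xml_util

    attributes = odict()
    attributes[ 'name' ] = 'chemicaltoolbox'
    attributes[ 'owner' ] = 'bgruening'
    sub_elements = odict()
    sub_elements[ 'description' ] = 'A short description.'
    sub_elements[ 'long_description' ] = ''  # blank, so no sub-element is created
    elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements )
    print xml_util.xml_to_string( elem, use_indent=True )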
 def indent( elem, level=0 ):
 """
 Prints an XML tree with each node indented according to its depth. This method is used to print the shed tool config (e.g., shed_tool_conf.xml
@@ -86,11 +111,11 @@
 fobj.close()
 return tree, error_message
-def xml_to_string( elem, encoding='utf-8', use_indent=False ):
- if len( elem ):
+def xml_to_string( elem, encoding='utf-8', use_indent=False, level=0 ):
+ if elem is not None:
 if use_indent:
 # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0 is the <toolbox> tag set.
- indent( elem, level=1 )
+ indent( elem, level=level )
 if using_python_27:
 xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
 else:

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a scripts/api/repair_tool_shed_repository.py
--- a/scripts/api/repair_tool_shed_repository.py
+++ b/scripts/api/repair_tool_shed_repository.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 """
 Repair a specified repository revision previously installed into Galaxy.
-</section>
 Here is a working example of how to use this script to repair a repository installed into Galaxy.
 ./repair_tool_shed_repository.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --revision f28d5018f9cb

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -69,7 +69,7 @@
 </script>
 </%def>
-<%def name="render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict )">
+<%def name="render_dependencies_section( repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )">
 <style type="text/css">
 #dependency_table{ table-layout:fixed;
 width:100%;
@@ -89,27 +89,44 @@
 def __str__( self ):
 return str( self.count )
- repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
- tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
- missing_tool_dependencies_root_folder = containers_dict[ 'missing_tool_dependencies' ]
+ repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+ missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
 env_settings_heaader_row_displayed = False
 package_header_row_displayed = False
+ if revision_label:
+ revision_label_str = ' revision <b>%s</b> of ' % str( revision_label )
+ else:
+ revision_label_str = ' '
 %>
 <div class="form-row">
 <div class="toolParamHelp" style="clear: both;">
 <p>
- These dependencies can be automatically handled with the installed repository, providing significant benefits, and
- Galaxy includes various features to manage them.
+ %if export:
+ The following additional repositories are required by${revision_label_str}the <b>${repository.name}</b> repository
+ and they can be exported as well.
+ %else:
+ These dependencies can be automatically handled with${revision_label_str}the installed repository, providing significant
+ benefits, and Galaxy includes various features to manage them.
+ %endif
 </p>
 </div>
 </div>
 %if repository_dependencies_root_folder:
- %if install_repository_dependencies_check_box is not None:
+ %if repository_dependencies_check_box is not None:
 <div class="form-row">
- <label>Handle repository dependencies?</label>
- ${install_repository_dependencies_check_box.get_html()}
+ %if export:
+ <label>Export repository dependencies?</label>
+ %else:
+ <label>Handle repository dependencies?</label>
+ %endif
+ ${repository_dependencies_check_box.get_html()}
 <div class="toolParamHelp" style="clear: both;">
- Un-check to skip automatic installation of these additional repositories required by this repository.
+ %if export:
+ Un-check to skip exporting the following additional repositories that are required by this repository.
+ %else:
+ Un-check to skip automatic installation of these additional repositories required by this repository.
+ %endif
 </div>
 </div>
 <div style="clear: both"></div>

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -39,7 +39,7 @@
 <th bgcolor="#EBD9B2">Confirm dependency installation</th>
 </table>
 </div>
- ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict )}
+ ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )}
 %endif
 %if shed_tool_conf_select_field:
 <div class="form-row">

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
--- a/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
+++ b/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
@@ -78,7 +78,7 @@
 <th bgcolor="#EBD9B2">Confirm dependency installation</th>
 </table>
 </div>
- ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict )}
+ ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )}
 <div style="clear: both"></div>
 %endif
 <div class="form-row">

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -78,7 +78,7 @@
 <th bgcolor="#EBD9B2">Confirm dependency installation</th>
 </table>
 </div>
- ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict )}
+ ${render_dependencies_section( install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )}
 <div style="clear: both"></div>
 %endif
 %if shed_tool_conf_select_field:

diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/webapps/tool_shed/common/repository_actions_menu.mako
--- a/templates/webapps/tool_shed/common/repository_actions_menu.mako
+++ b/templates/webapps/tool_shed/common/repository_actions_menu.mako
@@ -174,6 +174,7 @@
 <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='deprecate', id=trans.security.encode_id( repository.id ), mark_deprecated=False )}">Mark repository as not deprecated</a>
 %endif
 %if can_download:
+ <a class="action-button" href="${h.url_for( controller='repository', action='export', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Export this revision</a>
 <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='gz' )}">Download as a .tar.gz file</a>
 <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
 <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='zip' )}">Download as a zip file</a>
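Note that the new "Export this revision" action passes the changeset_revision currently being viewed, while the three download links still pass repository.tip( trans.app ). The rendered export link should therefore look roughly like the following (the encoded id and changeset hash are illustrative):

    /repository/export?repository_id=f2db41e1fa331b3e&changeset_revision=4133dbf7ff4d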
diff -r 436ca79f821132163526fa9bf1f7d6b7fe3f13c7 -r 8ca6cfea19091f0d44be3eec3b3d18794133a39a templates/webapps/tool_shed/repository/export_repository.mako
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/export_repository.mako
@@ -0,0 +1,47 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/tool_shed/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${container_javascripts()}
+</%def>
+
+${render_tool_shed_repository_actions( repository )}
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+ <div class="toolFormBody">
+ <form name="export_repository" id="export_repository" action="${h.url_for( controller='repository', action='export', repository_id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}" method="post" >
+ %if containers_dict is not None and export_repository_dependencies_check_box is not None:
+ ${render_dependencies_section( export_repository_dependencies_check_box, None, containers_dict, revision_label=revision_label, export=True )}
+ <div style="clear: both"></div>
+ %else:
+ No repository dependencies are defined for revision <b>${revision_label}</b> of this repository, so click <b>Export</b> to export the selected revision.
+ %endif
+ <div class="form-row">
+ <input type="submit" name="export_repository_button" value="Export"/>
+ </div>
+ </form>
+ </div>
+</div>

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.