commit/galaxy-central: greg: Tool Shed API fixes and code cleanup.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/0643bbc23cc1/ Changeset: 0643bbc23cc1 User: greg Date: 2014-01-09 21:59:18 Summary: Tool Shed API fixes and code cleanup. Affected #: 7 files diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -19,11 +19,6 @@ log = logging.getLogger( __name__ ) -def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ): - value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ), - 'error_message' : tool_shed_repository.error_message or '' } - return value_mapper - def get_message_for_no_shed_tool_config(): # This Galaxy instance is not configured with a shed-related tool panel configuration file. message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> ' @@ -48,8 +43,8 @@ :param id: the encoded id of the ToolShedRepository object """ # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_w... - # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the - # list based on the following dictionary of those few attributes. + # Since exported workflows are dictionaries with very few attributes that differentiate them from each + # other, we'll build the list based on the following dictionary of those few attributes. 
exported_workflows = [] repository = suc.get_tool_shed_repository_by_id( trans, id ) metadata = repository.metadata @@ -58,17 +53,23 @@ else: exported_workflow_tups = [] for index, exported_workflow_tup in enumerate( exported_workflow_tups ): - # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location - # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located. + # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of + # relative_path is the location on disk (relative to the root of the installed repository) where the + # exported_workflow_dict file (.ga file) is located. exported_workflow_dict = exported_workflow_tup[ 1 ] annotation = exported_workflow_dict.get( 'annotation', '' ) format_version = exported_workflow_dict.get( 'format-version', '' ) workflow_name = exported_workflow_dict.get( 'name', '' ) - # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list. + # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's + # location (i.e., index) in the list. display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name ) exported_workflows.append( display_dict ) return exported_workflows + def __get_value_mapper( self, trans ): + value_mapper = { 'id' : trans.security.encode_id } + return value_mapper + @web.expose_api def import_workflow( self, trans, payload, **kwd ): """ @@ -96,13 +97,11 @@ # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list. 
exported_workflow = exported_workflows[ int( index ) ] workflow_name = exported_workflow[ 'workflow_name' ] - workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name ) if status == 'error': - log.error( message, exc_info=True ) - trans.response.status = 500 - return message - else: - return workflow.to_dict( view='element' ) + log.debug( error_message ) + return {} + return workflow.to_dict( view='element' ) @web.expose_api def import_workflows( self, trans, **kwd ): @@ -125,11 +124,9 @@ imported_workflow_dicts = [] for exported_workflow_dict in exported_workflows: workflow_name = exported_workflow_dict[ 'workflow_name' ] - workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name ) if status == 'error': - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( error_message ) else: imported_workflow_dicts.append( workflow.to_dict( view='element' ) ) return imported_workflow_dicts @@ -142,22 +139,15 @@ """ # Example URL: http://localhost:8763/api/tool_shed_repositories tool_shed_repository_dicts = [] - try: - query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \ - .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \ - .all() - for tool_shed_repository in query: - tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) - tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', - action='show', - id=trans.security.encode_id( tool_shed_repository.id ) ) - tool_shed_repository_dicts.append( tool_shed_repository_dict ) - return tool_shed_repository_dicts - except Exception, e: - message = 
"Error in the tool_shed_repositories API in index: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + for tool_shed_repository in trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \ + .order_by( trans.app.install_model.ToolShedRepository.table.c.name ): + tool_shed_repository_dict = \ + tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans ) ) + tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', + action='show', + id=trans.security.encode_id( tool_shed_repository.id ) ) + tool_shed_repository_dicts.append( tool_shed_repository_dict ) + return tool_shed_repository_dicts @web.expose_api def install_repository_revision( self, trans, payload, **kwd ): @@ -208,8 +198,7 @@ # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file. if not suc.have_shed_tool_conf_for_install( trans ): message = get_message_for_no_shed_tool_config() - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) # Make sure the current user's API key proves he is an admin user in this Galaxy instance. if not trans.user_is_admin(): @@ -225,18 +214,20 @@ except Exception, e: message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \ ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) if raw_text: + # If successful, the response from get_repository_revision_install_info will be 3 + # dictionaries, a dictionary defining the Repository, a dictionary defining the + # Repository revision (RepositoryMetadata), and a dictionary including the additional + # information required to install the repository. 
items = json.from_json_string( raw_text ) repository_revision_dict = items[ 1 ] repo_info_dict = items[ 2 ] else: message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \ ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) repo_info_dicts = [ repo_info_dict ] # Make sure the tool shed returned everything we need for installing the repository. @@ -345,7 +336,7 @@ tool_path, install_tool_dependencies, reinstalling=False ) - tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) + tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) ) tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', action='show', id=trans.security.encode_id( tool_shed_repository.id ) ) @@ -394,8 +385,7 @@ if not suc.have_shed_tool_conf_for_install( trans ): # This Galaxy instance is not configured with a shed-related tool panel configuration file. message = get_message_for_no_shed_tool_config() - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) if not trans.user_is_admin(): raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' ) @@ -410,8 +400,7 @@ len( changeset_revisions ) != num_specified_repositories: message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered ' message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.' 
- log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information # about each of the repositories being installed. @@ -482,7 +471,7 @@ repair_dict = repository_util.repair_tool_shed_repository( trans, repository, encoding_util.tool_shed_encode( repo_info_dict ) ) - repository_dict = repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) ) + repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans ) ) repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', action='show', id=trans.security.encode_id( repository.id ) ) @@ -502,39 +491,39 @@ :param key: the API key of the Galaxy admin user. """ - try: - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results = dict( start_time=start_time, - successful_count=0, - unsuccessful_count=0, - repository_status=[] ) - # Make sure the current user's API key proves he is an admin user in this Galaxy instance. - if not trans.user_is_admin(): - raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' ) - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False ) - # Now reset metadata on all remaining repositories. 
- for repository in query: - repository_id = trans.security.encode_id( repository.id ) - try: - invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id ) - if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False ) - results[ 'unsuccessful_count' ] += 1 - else: - message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.owner ) ) - results[ 'successful_count' ] += 1 - except Exception, e: - message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.owner ), str( e ) ) + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results = dict( start_time=start_time, + successful_count=0, + unsuccessful_count=0, + repository_status=[] ) + # Make sure the current user's API key proves he is an admin user in this Galaxy instance. + if not trans.user_is_admin(): + raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' ) + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False ) + # Now reset metadata on all remaining repositories. 
+ for repository in query: + repository_id = trans.security.encode_id( repository.id ) + try: + invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id ) + if invalid_file_tups: + message = tool_util.generate_message_for_invalid_tools( trans, + invalid_file_tups, + repository, + None, + as_html=False ) results[ 'unsuccessful_count' ] += 1 - results[ 'repository_status' ].append( message ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + message = "Successfully reset metadata on repository %s owned by %s" % \ + ( str( repository.name ), str( repository.owner ) ) + results[ 'successful_count' ] += 1 + except Exception, e: + message = "Error resetting metadata on repository %s owned by %s: %s" % \ + ( str( repository.name ), str( repository.owner ), str( e ) ) + results[ 'unsuccessful_count' ] += 1 + results[ 'repository_status' ].append( message ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api def show( self, trans, id, **kwd ): @@ -545,15 +534,12 @@ :param id: the encoded id of the ToolShedRepository object """ # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e - try: - tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) - tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) - tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', - action='show', - id=trans.security.encode_id( 
tool_shed_repository.id ) ) - return tool_shed_repository_dict - except Exception, e: - message = "Error in tool_shed_repositories API in index: " + str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) + if tool_shed_repository is None: + log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) ) + return {} + tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) ) + tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', + action='show', + id=trans.security.encode_id( tool_shed_repository.id ) ) + return tool_shed_repository_dict diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/tool_shed/api/repositories.py --- a/lib/galaxy/webapps/tool_shed/api/repositories.py +++ b/lib/galaxy/webapps/tool_shed/api/repositories.py @@ -36,22 +36,27 @@ :param name: the name of the Repository :param owner: the owner of the Repository - Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the - list is ordered oldest to newest. + Returns the ordered list of changeset revision hash strings that are associated with installable revisions. + As in the changelog, the list is ordered oldest to newest. """ # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_column&owner=test - try: + if name and owner: # Get the repository information. repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "cannot locate repository %s owned by %s." 
% ( str( name ), str( owner ) ) + log.debug( error_message ) + return [] repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ) return ordered_installable_revisions - except Exception, e: - message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) ) + log.debug( error_message ) + return [] @web.expose_api_anonymous def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ): @@ -106,49 +111,65 @@ ] } """ - repository_value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } - # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info?name=add_column&owner=test&changeset_revision=3a08cc21466f - try: + # Example URL: + # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr> + if name and owner and changeset_revision: # Get the repository information. 
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) ) + return {}, {}, {} encoded_repository_id = trans.security.encode_id( repository.id ) - repository_dict = repository.to_dict( view='element', value_mapper=repository_value_mapper ) + repository_dict = repository.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) repository_dict[ 'url' ] = web.url_for( controller='repositories', action='show', id=encoded_repository_id ) # Get the repository_metadata information. - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + encoded_repository_id, + changeset_revision ) if not repository_metadata: - # The changeset_revision column in the repository_metadata table has been updated with a new value value, so find the - # changeset_revision to which we need to update. + # The changeset_revision column in the repository_metadata table has been updated with a new + # value value, so find the changeset_revision to which we need to update. 
repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + encoded_repository_id, + new_changeset_revision ) changeset_revision = new_changeset_revision if repository_metadata: encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id ) repository_metadata_dict = repository_metadata.to_dict( view='collection', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) + value_mapper=self.__get_value_mapper( trans ) ) repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', action='show', id=encoded_repository_metadata_id ) # Get the repo_info_dict for installing the repository. 
- repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \ - has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \ + repo_info_dict, \ + includes_tools, \ + includes_tool_dependencies, \ + includes_tools_for_display_in_tool_panel, \ + has_repository_dependencies, \ + has_repository_dependencies_only_if_compiling_contained_td = \ repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision ) return repository_dict, repository_metadata_dict, repo_info_dict else: - message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \ + ( str( repository.id ), str( changeset_revision ) ) ) return repository_dict, {}, {} - except Exception, e: - message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " + debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \ + ( str( name ), str( owner ), str( changeset_revision ) ) + log.debug( debug_msg ) + return {}, {}, {} + + def __get_value_mapper( self, trans ): + value_mapper = { 'id' : trans.security.encode_id, + 'repository_id' : trans.security.encode_id } + return value_mapper @web.expose_api def import_capsule( self, trans, payload, **kwd ): @@ -177,29 +198,27 @@ uploaded_file=None, capsule_file_name=None ) if os.path.getsize( os.path.abspath( capsule_file_name ) ) == 0: - message = 'Your capsule file is empty.' 
- log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) ) + return {} try: # Open for reading with transparent compression. tar_archive = tarfile.open( capsule_file_path, 'r:*' ) except tarfile.ReadError, e: - message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) ) + return {} capsule_dict[ 'tar_archive' ] = tar_archive capsule_dict[ 'capsule_file_name' ] = capsule_file_name capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict ) capsule_dict = import_util.validate_capsule( trans, **capsule_dict ) status = capsule_dict.get( 'status', 'error' ) if status == 'error': - message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'The capsule contents are invalid and cannpt be imported:<br/>%s' % \ + str( capsule_dict.get( 'error_message', '' ) ) ) + return {} encoded_file_path = capsule_dict.get( 'encoded_file_path', None ) + if encoded_file_path is None: + log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) ) + return {} file_path = encoding_util.tool_shed_decode( encoded_file_path ) export_info_file_path = os.path.join( file_path, 'export_info.xml' ) export_info_dict = import_util.get_export_info_dict( export_info_file_path ) @@ -216,12 +235,14 @@ # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict. 
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path - import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans, - repository_status_info_dict, - import_results_tups ) + import_results_tups = \ + repository_maintenance_util.create_repository_and_import_archive( trans, + repository_status_info_dict, + import_results_tups ) import_util.check_status_and_reset_downloadable( trans, import_results_tups ) suc.remove_dir( file_path ) - # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict. + # NOTE: the order of installation is defined in import_results_tups, but order will be lost + # when transferred to return_dict. return_dict = {} for import_results_tup in import_results_tups: ok, name_owner, message = import_results_tup @@ -237,28 +258,19 @@ GET /api/repositories Displays a collection (list) of repositories. 
""" - value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } # Example URL: http://localhost:9009/api/repositories repository_dicts = [] - deleted = util.string_as_bool( deleted ) - try: - query = trans.sa_session.query( trans.app.model.Repository ) \ - .filter( trans.app.model.Repository.table.c.deleted == deleted ) \ - .order_by( trans.app.model.Repository.table.c.name ) \ - .all() - for repository in query: - repository_dict = repository.to_dict( view='collection', value_mapper=value_mapper ) - repository_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=trans.security.encode_id( repository.id ) ) - repository_dicts.append( repository_dict ) - return repository_dicts - except Exception, e: - message = "Error in the Tool Shed repositories API in index: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + deleted = util.asbool( deleted ) + for repository in trans.sa_session.query( trans.app.model.Repository ) \ + .filter( trans.app.model.Repository.table.c.deleted == deleted ) \ + .order_by( trans.app.model.Repository.table.c.name ): + repository_dict = repository.to_dict( view='collection', + value_mapper=self.__get_value_mapper( trans ) ) + repository_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=trans.security.encode_id( repository.id ) ) + repository_dicts.append( repository_dict ) + return repository_dicts @web.expose_api def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ): @@ -273,28 +285,22 @@ in addition to those repositories of type tool_dependency_definition. This param is ignored if the current user is not an admin user, in which case this same restriction is automatic. 
""" - try: - if trans.user_is_admin(): - my_writable = util.asbool( my_writable ) - else: - my_writable = True - handled_repository_ids = [] - repository_ids = [] - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) - # Make sure repositories of type tool_dependency_definition are first in the list. - for repository in query: - if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - repository_ids.append( trans.security.encode_id( repository.id ) ) - # Now add all remaining repositories to the list. - for repository in query: - if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - repository_ids.append( trans.security.encode_id( repository.id ) ) - return repository_ids - except Exception, e: - message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + if trans.user_is_admin(): + my_writable = util.asbool( my_writable ) + else: + my_writable = True + handled_repository_ids = [] + repository_ids = [] + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) + # Make sure repositories of type tool_dependency_definition are first in the list. + for repository in query: + if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + repository_ids.append( trans.security.encode_id( repository.id ) ) + # Now add all remaining repositories to the list. 
+ for repository in query: + if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + repository_ids.append( trans.security.encode_id( repository.id ) ) + return repository_ids @web.expose_api def reset_metadata_on_repositories( self, trans, payload, **kwd ): @@ -318,6 +324,7 @@ :param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip. This param can be used as an alternative to the above encoded_ids_to_skip. """ + def handle_repository( trans, repository, results ): log.debug( "Resetting metadata on repository %s" % str( repository.name ) ) repository_id = trans.security.encode_id( repository.id ) @@ -335,53 +342,48 @@ status = '%s : %s' % ( str( repository.name ), message ) results[ 'repository_status' ].append( status ) return results - try: - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results = dict( start_time=start_time, - repository_status=[], - successful_count=0, - unsuccessful_count=0 ) - handled_repository_ids = [] - encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] ) - skip_file = payload.get( 'skip_file', None ) - if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip: - # Load the list of encoded_ids_to_skip from the skip_file. - # Contents of file must be 1 encoded repository id per line. - lines = open( skip_file, 'rb' ).readlines() - for line in lines: - if line.startswith( '#' ): - # Skip comments. - continue - encoded_ids_to_skip.append( line.rstrip( '\n' ) ) - if trans.user_is_admin(): - my_writable = util.asbool( payload.get( 'my_writable', False ) ) - else: - my_writable = True - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) - # First reset metadata on all repositories of type repository_dependency_definition. 
- for repository in query: - encoded_id = trans.security.encode_id( repository.id ) - if encoded_id in encoded_ids_to_skip: - log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ - ( str( repository.id ), str( encoded_ids_to_skip ) ) ) - elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - results = handle_repository( trans, repository, results ) - # Now reset metadata on all remaining repositories. - for repository in query: - encoded_id = trans.security.encode_id( repository.id ) - if encoded_id in encoded_ids_to_skip: - log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ - ( str( repository.id ), str( encoded_ids_to_skip ) ) ) - elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - results = handle_repository( trans, repository, results ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results = dict( start_time=start_time, + repository_status=[], + successful_count=0, + unsuccessful_count=0 ) + handled_repository_ids = [] + encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] ) + skip_file = payload.get( 'skip_file', None ) + if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip: + # Load the list of encoded_ids_to_skip from the skip_file. + # Contents of file must be 1 encoded repository id per line. + lines = open( skip_file, 'rb' ).readlines() + for line in lines: + if line.startswith( '#' ): + # Skip comments. 
+ continue + encoded_ids_to_skip.append( line.rstrip( '\n' ) ) + if trans.user_is_admin(): + my_writable = util.asbool( payload.get( 'my_writable', False ) ) + else: + my_writable = True + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) + # First reset metadata on all repositories of type repository_dependency_definition. + for repository in query: + encoded_id = trans.security.encode_id( repository.id ) + if encoded_id in encoded_ids_to_skip: + log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ + ( str( repository.id ), str( encoded_ids_to_skip ) ) ) + elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + results = handle_repository( trans, repository, results ) + # Now reset metadata on all remaining repositories. + for repository in query: + encoded_id = trans.security.encode_id( repository.id ) + if encoded_id in encoded_ids_to_skip: + log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ + ( str( repository.id ), str( encoded_ids_to_skip ) ) ) + elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + results = handle_repository( trans, repository, results ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api def reset_metadata_on_repository( self, trans, payload, **kwd ): @@ -395,6 +397,7 @@ The following parameters must be included in the payload. :param repository_id: the encoded id of the repository on which metadata is to be reset. 
""" + def handle_repository( trans, start_time, repository ): results = dict( start_time=start_time, repository_status=[] ) @@ -410,21 +413,16 @@ status = '%s : %s' % ( str( repository.name ), message ) results[ 'repository_status' ].append( status ) return results - try: - repository_id = payload.get( 'repository_id', None ) - if repository_id is not None: - repository = suc.get_repository_in_tool_shed( trans, repository_id ) - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) ) - results = handle_repository( trans, start_time, repository ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + + repository_id = payload.get( 'repository_id', None ) + if repository_id is not None: + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) ) + results = handle_repository( trans, start_time, repository ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api_anonymous def show( self, trans, id, **kwd ): @@ -434,27 +432,14 @@ :param id: the encoded id of the Repository object """ - value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135 - try: - repository = suc.get_repository_in_tool_shed( trans, id ) - repository_dict = repository.to_dict( view='element', value_mapper=value_mapper ) - 
repository_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=trans.security.encode_id( repository.id ) ) - return repository_dict - except Exception, e: - message = "Error in the Tool Shed repositories API in show: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message - - def __get_value_mapper( self, trans, repository_metadata ): - value_mapper = { 'id' : trans.security.encode_id, - 'repository_id' : trans.security.encode_id } - if repository_metadata.time_last_tested is not None: - # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires - # a function rather than a mapped value, so just pass the time_ago function here. - value_mapper[ 'time_last_tested' ] = time_ago - return value_mapper + repository = suc.get_repository_in_tool_shed( trans, id ) + if repository is None: + log.debug( "Unable to locate repository record for id %s." % ( str( id ) ) ) + return {} + repository_dict = repository.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + repository_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=trans.security.encode_id( repository.id ) ) + return repository_dict diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/galaxy/webapps/tool_shed/api/repository_revisions.py --- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py +++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py @@ -47,65 +47,29 @@ if not changeset_revision: raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." ) export_repository_dependencies = payload.get( 'export_repository_dependencies', False ) - try: - # We'll currently support only gzip-compressed tar archives. - file_type = 'gz' - export_repository_dependencies = util.string_as_bool( export_repository_dependencies ) - # Get the repository information. 
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - repository_id = trans.security.encode_id( repository.id ) - response = export_util.export_repository( trans, - tool_shed_url, - repository_id, - str( repository.name ), - changeset_revision, - file_type, - export_repository_dependencies, - api=True ) - return response - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + # We'll currently support only gzip-compressed tar archives. + file_type = 'gz' + export_repository_dependencies = util.asbool( export_repository_dependencies ) + # Get the repository information. + repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) ) + log.debug( error_message ) + return None, error_message + repository_id = trans.security.encode_id( repository.id ) + return export_util.export_repository( trans, + tool_shed_url, + repository_id, + str( repository.name ), + changeset_revision, + file_type, + export_repository_dependencies, + api=True ) - @web.expose_api_anonymous - def repository_dependencies( self, trans, id, **kwd ): - """ - GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies - Displays information about a repository_metadata record in the Tool Shed. - - :param id: the encoded id of the `RepositoryMetadata` object - """ - # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb125... 
+ def __get_value_mapper( self, trans ): value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } - repository_dependencies_dicts = [] - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) - metadata = repository_metadata.metadata - if metadata and 'repository_dependencies' in metadata: - rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] - for rd_tup in rd_tups: - tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ] - repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - repository_dependency_id = trans.security.encode_id( repository_dependency.id ) - repository_dependency_repository_metadata = \ - suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision ) - repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id ) - repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper ) - # We have to add the changeset_revision of of the repository dependency. - repository_dependency_dict[ 'changeset_revision' ] = changeset_revision - repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=repository_dependency_repository_metadata_id ) - repository_dependencies_dicts.append( repository_dependency_dict ) - return repository_dependencies_dicts - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + 'repository_id' : trans.security.encode_id } + return value_mapper @web.expose_api_anonymous def index( self, trans, **kwd ): @@ -120,59 +84,94 @@ # Filter by downloadable if received. 
downloadable = kwd.get( 'downloadable', None ) if downloadable is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) ) # Filter by malicious if received. malicious = kwd.get( 'malicious', None ) if malicious is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.string_as_bool( malicious ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) ) # Filter by tools_functionally_correct if received. tools_functionally_correct = kwd.get( 'tools_functionally_correct', None ) if tools_functionally_correct is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) ) # Filter by missing_test_components if received. missing_test_components = kwd.get( 'missing_test_components', None ) if missing_test_components is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.string_as_bool( missing_test_components ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) ) # Filter by do_not_test if received. do_not_test = kwd.get( 'do_not_test', None ) if do_not_test is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) ) # Filter by includes_tools if received. 
includes_tools = kwd.get( 'includes_tools', None ) if includes_tools is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.string_as_bool( includes_tools ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) ) # Filter by test_install_error if received. test_install_error = kwd.get( 'test_install_error', None ) if test_install_error is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.string_as_bool( test_install_error ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) ) # Filter by skip_tool_test if received. skip_tool_test = kwd.get( 'skip_tool_test', None ) if skip_tool_test is not None: - skip_tool_test = util.string_as_bool( skip_tool_test ) + skip_tool_test = util.asbool( skip_tool_test ) skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] ) if skip_tool_test: clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) else: clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) ) - # Generate and execute the query. 
- try: - query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \ - .filter( and_( *clause_list ) ) \ - .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \ - .all() - for repository_metadata in query: - repository_metadata_dict = repository_metadata.to_dict( view='collection', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) - repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', - action='show', - id=trans.security.encode_id( repository_metadata.id ) ) - repository_metadata_dicts.append( repository_metadata_dict ) - return repository_metadata_dicts - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in index: " + str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \ + .filter( and_( *clause_list ) ) \ + .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ): + repository_metadata_dict = repository_metadata.to_dict( view='collection', + value_mapper=self.__get_value_mapper( trans ) ) + repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', + action='show', + id=trans.security.encode_id( repository_metadata.id ) ) + repository_metadata_dicts.append( repository_metadata_dict ) + return repository_metadata_dicts + + @web.expose_api_anonymous + def repository_dependencies( self, trans, id, **kwd ): + """ + GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies + Displays information about a repository_metadata record in the Tool Shed. + + :param id: the encoded id of the `RepositoryMetadata` object + """ + # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb125... 
+ repository_dependencies_dicts = [] + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) + if repository_metadata is None: + log.debug( 'Invalid repository_metadata id received: %s' % str( id ) ) + return repository_dependencies_dicts + metadata = repository_metadata.metadata + if metadata is None: + log.debug( 'The repository_metadata record with id %s has no metadata.' % str( id ) ) + return repository_dependencies_dicts + if 'repository_dependencies' in metadata: + rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] + for rd_tup in rd_tups: + tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ] + repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository_dependency is None: + log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) ) + continue + repository_dependency_id = trans.security.encode_id( repository_dependency.id ) + repository_dependency_repository_metadata = \ + suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision ) + if repository_dependency_repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \ + ( str( repository_dependency_id ), str( name ), str( owner ) ) ) + continue + repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id ) + repository_dependency_dict = repository_dependency.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + # We have to add the changeset_revision of the repository dependency. 
+ repository_dependency_dict[ 'changeset_revision' ] = changeset_revision + repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=repository_dependency_repository_metadata_id ) + repository_dependencies_dicts.append( repository_dependency_dict ) + return repository_dependencies_dicts @web.expose_api_anonymous def show( self, trans, id, **kwd ): @@ -183,19 +182,16 @@ :param id: the encoded id of the `RepositoryMetadata` object """ # Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620 - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) - repository_metadata_dict = repository_metadata.to_dict( view='element', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) - repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', - action='show', - id=trans.security.encode_id( repository_metadata.id ) ) - return repository_metadata_dict - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) + if repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) ) + return {} + repository_metadata_dict = repository_metadata.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', + action='show', + id=trans.security.encode_id( repository_metadata.id ) ) + return repository_metadata_dict @web.expose_api def update( self, trans, payload, **kwd ): @@ -204,37 +200,32 @@ Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload. 
""" repository_metadata_id = kwd.get( 'id', None ) - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) - flush_needed = False - for key, new_value in payload.items(): - if key == 'time_last_tested': - repository_metadata.time_last_tested = datetime.datetime.utcnow() - flush_needed = True - elif hasattr( repository_metadata, key ): - # log information when setting attributes associated with the Tool Shed's install and test framework. - if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error', - 'tools_functionally_correct' ]: - log.debug( 'Setting repository_metadata table column %s to value %s for changeset_revision %s via the Tool Shed API.' % \ - ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) ) - setattr( repository_metadata, key, new_value ) - flush_needed = True - if flush_needed: - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + if repository_metadata_id is None: + raise HTTPBadRequest( detail="Missing required parameter 'id'." ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) + if repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s' % str( repository_metadata_id ) ) + return {} + flush_needed = False + for key, new_value in payload.items(): + if key == 'time_last_tested': + repository_metadata.time_last_tested = datetime.datetime.utcnow() + flush_needed = True + elif hasattr( repository_metadata, key ): + # log information when setting attributes associated with the Tool Shed's install and test framework. 
+ if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error', + 'tools_functionally_correct' ]: + log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' % \ + ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) ) + setattr( repository_metadata, key, new_value ) + flush_needed = True + if flush_needed: + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + trans.sa_session.refresh( repository_metadata ) repository_metadata_dict = repository_metadata.to_dict( view='element', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) + value_mapper=self.__get_value_mapper( trans ) ) repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', action='show', id=trans.security.encode_id( repository_metadata.id ) ) return repository_metadata_dict - - def __get_value_mapper( self, trans, repository_metadata ): - value_mapper = { 'id' : trans.security.encode_id, - 'repository_id' : trans.security.encode_id } - return value_mapper diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -1,34 +1,27 @@ -import os, sys, urllib, urllib2 +import os +import sys +import urllib +import urllib2 new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ] new_path.extend( sys.path[ 1: ] ) sys.path = new_path +import tool_shed.util.shed_util_common as suc + from galaxy import eggs import pkg_resources pkg_resources.require( "simplejson" ) import simplejson -pkg_resources.require( "pycrypto" ) -from Crypto.Cipher import Blowfish -from Crypto.Util.randpool import RandomPool -from Crypto.Util import number - -def encode_id( config_id_secret, obj_id ): - # Utility method to encode ID's - id_cipher = Blowfish.new( config_id_secret ) 
- # Convert to string - s = str( obj_id ) - # Pad to a multiple of 8 with leading "!" - s = ( "!" * ( 8 - len(s) % 8 ) ) + s - # Encrypt - return id_cipher.encrypt( s ).encode( 'hex' ) - def delete( api_key, url, data, return_formatted=True ): - # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. + """ + Sends an API DELETE request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: - url = make_url( api_key, url ) + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) req.get_method = lambda: 'DELETE' r = simplejson.loads( urllib2.urlopen( req ).read() ) @@ -46,12 +39,13 @@ print r def display( url, api_key=None, return_formatted=True ): - # Sends an API GET request and acts as a generic formatter for the JSON response. + """Sends an API GET request and acts as a generic formatter for the JSON response.""" try: r = get( url, api_key=api_key ) except urllib2.HTTPError, e: print e - print e.read( 1024 ) # Only return the first 1K of errors. + # Only return the first 1K of errors. + print e.read( 1024 ) sys.exit( 1 ) if type( r ) == unicode: print 'error: %s' % r @@ -83,16 +77,94 @@ print 'response is unknown type: %s' % type( r ) def get( url, api_key=None ): - # Do the actual GET. 
- url = make_url( url, api_key=api_key ) + """Do the GET.""" + url = make_url( url, api_key=api_key, args=None ) try: return simplejson.loads( urllib2.urlopen( url ).read() ) except simplejson.decoder.JSONDecodeError, e: print "URL did not return JSON data" - sys.exit(1) + sys.exit( 1 ) + +def get_api_url( base, parts=[], params=None ): + """Compose and return a URL for the Tool Shed API.""" + if 'api' in parts and parts.index( 'api' ) != 0: + parts.pop( parts.index( 'api' ) ) + parts.insert( 0, 'api' ) + elif 'api' not in parts: + parts.insert( 0, 'api' ) + url = suc.url_join( base, *parts ) + if params is not None: + try: + query_string = urllib.urlencode( params ) + except Exception, e: + # The value of params must be a string. + query_string = params + url += '?%s' % query_string + return url + +def get_latest_downloadable_changeset_revision_via_api( url, name, owner ): + """ + Return the latest downloadable changeset revision for the repository defined by the received + name and owner. + """ + error_message = '' + parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ] + params = dict( name=name, owner=owner ) + api_url = get_api_url( base=url, parts=parts, params=params ) + changeset_revisions, error_message = json_from_url( api_url ) + if changeset_revisions is None or error_message: + return None, error_message + if len( changeset_revisions ) >= 1: + return changeset_revisions[ -1 ], error_message + return suc.INITIAL_CHANGELOG_HASH, error_message + +def get_repository_dict( url, repository_dict ): + """ + Send a request to the Tool Shed to get additional information about the repository defined + by the received repository_dict. Add the information to the repository_dict and return it. 
+ """ + error_message = '' + if not isinstance( repository_dict, dict ): + error_message = 'Invalid repository_dict received: %s' % str( repository_dict ) + return None, error_message + repository_id = repository_dict.get( 'repository_id', None ) + if repository_id is None: + error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict ) + return None, error_message + parts = [ 'api', 'repositories', repository_id ] + api_url = get_api_url( base=url, parts=parts ) + extended_dict, error_message = json_from_url( api_url ) + if extended_dict is None or error_message: + return None, error_message + name = extended_dict.get( 'name', None ) + owner = extended_dict.get( 'owner', None ) + if name is not None and owner is not None: + name = str( name ) + owner = str( owner ) + latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner ) + if latest_changeset_revision is None or error_message: + return None, error_message + extended_dict[ 'latest_revision' ] = str( latest_changeset_revision ) + return extended_dict, error_message + else: + error_message = 'Invalid extended_dict does not contain name or owner entries: %s' % str( extended_dict ) + return None, error_message + +def json_from_url( url ): + """Send a request to the Tool Shed via the Tool Shed API and handle the response.""" + error_message = '' + url_handle = urllib.urlopen( url ) + url_contents = url_handle.read() + try: + parsed_json = simplejson.loads( url_contents ) + except Exception, e: + error_message = str( url_contents ) + print 'Error parsing JSON data in json_from_url(): ', str( e ) + return None, error_message + return parsed_json, error_message def make_url( url, api_key=None, args=None ): - # Adds the API Key to the URL if it's not already there. 
+ """Adds the API Key to the URL if it's not already there.""" if args is None: args = [] argsep = '&' @@ -104,20 +176,23 @@ return url + argsep + '&'.join( [ '='.join( t ) for t in args ] ) def post( url, data, api_key=None ): - # Do the actual POST. - url = make_url( url, api_key=api_key ) + """Do the POST.""" + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) ) return simplejson.loads( urllib2.urlopen( req ).read() ) def put( url, data, api_key=None ): - # Do the actual PUT. - url = make_url( url, api_key=api_key ) + """Do the PUT.""" + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) req.get_method = lambda: 'PUT' return simplejson.loads( urllib2.urlopen( req ).read() ) def submit( url, data, api_key=None, return_formatted=True ): - # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. + """ + Sends an API POST request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: r = post( url, data, api_key=api_key ) except urllib2.HTTPError, e: @@ -132,7 +207,8 @@ print 'Response' print '--------' if type( r ) == list: - # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements. + # Currently the only implemented responses are lists of dicts, because submission creates + # some number of collection elements. for i in r: if type( i ) == dict: if 'url' in i: @@ -149,9 +225,12 @@ print r def update( api_key, url, data, return_formatted=True ): - # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. 
+ """ + Sends an API PUT request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: - r = put( api_key, url, data ) + r = put( url, data, api_key=api_key ) except urllib2.HTTPError, e: if return_formatted: print e diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 lib/tool_shed/scripts/api/get_filtered_repository_revisions.py --- a/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py +++ b/lib/tool_shed/scripts/api/get_filtered_repository_revisions.py @@ -26,69 +26,19 @@ import urllib sys.path.insert( 0, os.path.dirname( __file__ ) ) + from common import get +from common import get_api_url +from common import get_repository_dict +from common import json_from_url + +from galaxy.util import asbool from galaxy.util.json import from_json_string import tool_shed.util.shed_util_common as suc -def get_api_url( base, parts=[], params=None ): - if 'api' in parts and parts.index( 'api' ) != 0: - parts.pop( parts.index( 'api' ) ) - parts.insert( 0, 'api' ) - elif 'api' not in parts: - parts.insert( 0, 'api' ) - url = suc.url_join( base, *parts ) - if params: - url += '?%s' % params - return url - -def get_latest_downloadable_changeset_revision( url, name, owner ): - error_message = '' - parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ] - params = urllib.urlencode( dict( name=name, owner=owner ) ) - api_url = get_api_url( base=url, parts=parts, params=params ) - changeset_revisions, error_message = json_from_url( api_url ) - if error_message: - return None, error_message - if changeset_revisions: - return changeset_revisions[ -1 ], error_message - else: - return suc.INITIAL_CHANGELOG_HASH, error_message - -def get_repository_dict( url, repository_dict ): - error_message = '' - parts = [ 'api', 'repositories', repository_dict[ 'repository_id' ] ] - api_url = get_api_url( base=url, parts=parts ) - extended_dict, error_message = 
json_from_url( api_url ) - if error_message: - return None, error_message - name = str( extended_dict[ 'name' ] ) - owner = str( extended_dict[ 'owner' ] ) - latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner ) - if error_message: - print error_message - extended_dict[ 'latest_revision' ] = str( latest_changeset_revision ) - return extended_dict, error_message - -def json_from_url( url ): - error_message = '' - url_handle = urllib.urlopen( url ) - url_contents = url_handle.read() - try: - parsed_json = from_json_string( url_contents ) - except Exception, e: - error_message = str( url_contents ) - return None, error_message - return parsed_json, error_message - -def string_as_bool( string ): - if str( string ).lower() in [ 'true' ]: - return True - else: - return False - def main( options ): base_tool_shed_url = options.tool_shed_url.rstrip( '/' ) - latest_revision_only = string_as_bool( options.latest_revision_only ) + latest_revision_only = asbool( options.latest_revision_only ) do_not_test = str( options.do_not_test ) downloadable = str( options.downloadable ) includes_tools = str( options.includes_tools ) @@ -108,30 +58,31 @@ tools_functionally_correct=tools_functionally_correct ) ) api_url = get_api_url( base=base_tool_shed_url, parts=parts, params=params ) baseline_repository_dicts, error_message = json_from_url( api_url ) - if error_message: + if baseline_repository_dicts is None or error_message: print error_message - repository_dicts = [] - for baseline_repository_dict in baseline_repository_dicts: - # We need to get some details from the tool shed API, such as repository name and owner, to pass on to the - # module that will generate the install methods. 
- repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict ) - if error_message: - print 'Error getting additional details from the API: ', error_message - repository_dicts.append( baseline_repository_dict ) - else: - # Don't test empty repositories. - changeset_revision = baseline_repository_dict[ 'changeset_revision' ] - if changeset_revision != suc.INITIAL_CHANGELOG_HASH: - # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and - # append it to the list of repository_dicts to install and test. - if latest_revision_only: - latest_revision = repository_dict[ 'latest_revision' ] - if changeset_revision == latest_revision: + else: + repository_dicts = [] + for baseline_repository_dict in baseline_repository_dicts: + # We need to get additional details from the tool shed API to pass on to the + # module that will generate the install methods. + repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict ) + if error_message: + print 'Error getting additional details from the API: ', error_message + repository_dicts.append( baseline_repository_dict ) + else: + # Don't test empty repositories. + changeset_revision = baseline_repository_dict.get( 'changeset_revision', suc.INITIAL_CHANGELOG_HASH ) + if changeset_revision != suc.INITIAL_CHANGELOG_HASH: + # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and + # append it to the list of repository_dicts to install and test. 
+ if latest_revision_only: + latest_revision = repository_dict.get( 'latest_revision', suc.INITIAL_CHANGELOG_HASH ) + if changeset_revision == latest_revision: + repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) ) + else: repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) ) - else: - repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) ) - print '\n\n', repository_dicts - print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...' + print '\n\n', repository_dicts + print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...' if __name__ == '__main__': parser = argparse.ArgumentParser( description='Get a filtered list of repository dictionaries.' ) diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 test/install_and_test_tool_shed_repositories/base/util.py --- a/test/install_and_test_tool_shed_repositories/base/util.py +++ b/test/install_and_test_tool_shed_repositories/base/util.py @@ -3,10 +3,9 @@ cwd = os.getcwd() sys.path.append( cwd ) -new_path = [ os.path.join( cwd, "scripts" ), - os.path.join( cwd, "lib" ), +new_path = [ os.path.join( cwd, "lib" ), os.path.join( cwd, 'test' ), - os.path.join( cwd, 'scripts', 'api' ) ] + os.path.join( cwd, 'lib', 'tool_shed', 'scripts', 'api' ) ] new_path.extend( sys.path ) sys.path = new_path @@ -27,6 +26,10 @@ from datetime import datetime from datetime import timedelta +from common import get_api_url +from common import get_latest_downloadable_changeset_revision_via_api +from common import get_repository_dict +from common import json_from_url from common import update from galaxy.util import asbool @@ -267,18 +270,6 @@ version = str( tool_dependency_dict[ 'version' ] ) print "# %s %s version %s" % ( type, name, version ) -def get_api_url( base, parts=[], params=None ): - if 
'api' in parts and parts.index( 'api' ) != 0: - parts.pop( parts.index( 'api' ) ) - parts.insert( 0, 'api' ) - elif 'api' not in parts: - parts.insert( 0, 'api' ) - url = suc.url_join( base, *parts ) - if params is not None: - query_string = urllib.urlencode( params ) - url += '?%s' % query_string - return url - def get_database_version( app ): ''' This method returns the value of the version column from the migrate_version table, using the provided app's SQLAlchemy session to determine @@ -296,19 +287,6 @@ break return version -def get_latest_downloadable_changeset_revision( url, name, owner ): - error_message = '' - parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ] - params = dict( name=name, owner=owner ) - api_url = get_api_url( base=url, parts=parts, params=params ) - changeset_revisions, error_message = json_from_url( api_url ) - if error_message: - return None, error_message - if changeset_revisions: - return changeset_revisions[ -1 ], error_message - else: - return suc.INITIAL_CHANGELOG_HASH, error_message - def get_missing_repository_dependencies( repository ): """ Return the entire list of missing repository dependencies for the received repository. 
The entire @@ -467,28 +445,6 @@ return None, error_message return repository_dependency_dicts, error_message -def get_repository_dict( url, repository_dict ): - error_message = '' - if not isinstance( repository_dict, dict ): - error_message = 'Invalid repository_dict received: %s' % str( repository_dict ) - return None, error_message - repository_id = repository_dict.get( 'repository_id', None ) - if repository_id is None: - error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict ) - return None, error_message - parts = [ 'api', 'repositories', repository_id ] - api_url = get_api_url( base=url, parts=parts ) - extended_dict, error_message = json_from_url( api_url ) - if error_message: - return None, error_message - name = str( extended_dict[ 'name' ] ) - owner = str( extended_dict[ 'owner' ] ) - latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision( url, name, owner ) - if error_message: - return None, error_message - extended_dict[ 'latest_revision' ] = str( latest_changeset_revision ) - return extended_dict, error_message - def get_repository_dependencies_dicts( url, encoded_repository_metadata_id ): """ Return a list if dictionaries that define the repository dependencies of the repository defined by the @@ -709,23 +665,23 @@ return False, None def is_latest_downloadable_revision( url, repository_dict ): - name = str( repository_dict[ 'name' ] ) - owner = str( repository_dict[ 'owner' ] ) - changeset_revision = str( repository_dict[ 'changeset_revision' ] ) - latest_revision = get_latest_downloadable_changeset_revision( url, name=name, owner=owner ) - return changeset_revision == str( latest_revision ) - -def json_from_url( url ): + """ + Return True if the changeset_revision defined in the received repository_dict is the latest + installable revision for the repository. 
+ """ error_message = '' - url_handle = urllib.urlopen( url ) - url_contents = url_handle.read() - try: - parsed_json = from_json_string( url_contents ) - except Exception, e: - error_message = str( url_contents ) - log.exception( 'Error parsing JSON data in json_from_url(): %s.' % str( e ) ) - return None, error_message - return parsed_json, error_message + name = repository_dict.get( 'name', None ) + owner = repository_dict.get( 'owner', None ) + changeset_revision = repository_dict.get( 'changeset_revision', None ) + if name is not None and owner is not None and changeset_revision is not None: + name = str( name ) + owner = str( owner ) + changeset_revision = str( changeset_revision ) + latest_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name=name, owner=owner ) + if latest_revision is None or error_message: + return None, error_message + is_latest_downloadable = changeset_revision == str( latest_revision ) + return is_latest_downloadable, error_message def parse_exclude_list( xml_filename ): """Return a list of repositories to exclude from testing.""" @@ -1024,9 +980,15 @@ if can_update_tool_shed: metadata_revision_id = repository_dict.get( 'id', None ) if metadata_revision_id is not None: - name = str( repository_dict[ 'name' ] ) - owner = str( repository_dict[ 'owner' ] ) - changeset_revision = str( repository_dict[ 'changeset_revision' ] ) + name = repository_dict.get( 'name', None ) + owner = repository_dict.get( 'owner', None ) + changeset_revision = repository_dict.get( 'changeset_revision', None ) + if name is None or owner is None or changeset_revision is None: + log.debug( 'Entries for name, owner or changeset_revision missing from repository_dict %s' % str( repository_dict ) ) + return {} + name = str( name ) + owner = str( owner ) + changeset_revision = str( changeset_revision ) log.debug('\n=============================================================\n' ) log.debug( 'Inserting the following into 
tool_test_results for revision %s of repository %s owned by %s:\n%s' % \ ( changeset_revision, name, owner, str( tool_test_results_dict ) ) ) diff -r 17e9e3ffb571e8f200ee8bf195a3708b2be4792b -r 0643bbc23cc1c845516469e3fa07270318c0c1a0 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py --- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py +++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py @@ -685,9 +685,16 @@ tool_test_results_dict[ 'failed_tests' ] = failed_test_dicts failed_repository_dict = repository_identifier_dict install_and_test_statistics_dict[ 'at_least_one_test_failed' ].append( failed_repository_dict ) - set_do_not_test = \ - not install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url, - repository_dict ) + is_latest_downloadable_revision, error_message = \ + install_and_test_base_util.is_latest_downloadable_revision( install_and_test_base_util.galaxy_tool_shed_url, + repository_dict ) + if is_latest_downloadable_revision is None or error_message: + log.debug( 'Error attempting to determine if revision %s of repository %s owned by %s ' % \ + ( changeset_revision, name, owner ) ) + log.debug( 'is the latest downloadable revision: %s' % str( error_message ) ) + set_do_not_test = False + else: + set_do_not_test = not is_latest_downloadable_revision params = dict( tools_functionally_correct=False, test_install_error=False, do_not_test=str( set_do_not_test ) ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
-
commits-noreply@bitbucket.org