1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/49b11be8c5c4/
Changeset: 49b11be8c5c4
User: greg
Date: 2014-06-18 21:28:20
Summary: Eliminate the use of the Galaxy web transaction object in Tool Shed utility functions related to importing a repository capsule into a Tool Shed, and move some hg-related functions from the commit_util module to the hg_util module.
Affected #: 15 files

diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -419,7 +419,7 @@ try: invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans.app, repository_id ) if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, + message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, None, diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/api/repositories.py --- a/lib/galaxy/webapps/tool_shed/api/repositories.py +++ b/lib/galaxy/webapps/tool_shed/api/repositories.py @@ -250,8 +250,8 @@ return {} capsule_dict[ 'tar_archive' ] = tar_archive capsule_dict[ 'capsule_file_name' ] = capsule_file_name - capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict ) - capsule_dict = import_util.validate_capsule( trans, **capsule_dict ) + capsule_dict = import_util.extract_capsule_files( **capsule_dict ) + capsule_dict = import_util.validate_capsule( **capsule_dict ) status = capsule_dict.get( 'status', 'error' ) if status == 'error': log.debug( 'The capsule contents are invalid and cannot be imported:<br/>%s' % \ @@ -268,7 +268,10 @@ # The manifest.xml file has already been validated, so no error_message should be returned here. repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path ) # Determine the status for each exported repository archive contained within the capsule. - repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts ) + repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans.app, + trans.user, + trans.user_is_admin(), + repository_info_dicts ) # Generate a list of repository name / import results message tuples for display after the capsule is imported. import_results_tups = [] # Only create repositories that do not yet exist and that the current user is authorized to create. The @@ -277,11 +280,12 @@ # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path - import_results_tups = \ - repository_maintenance_util.create_repository_and_import_archive( trans, - repository_status_info_dict, - import_results_tups ) - import_util.check_status_and_reset_downloadable( trans, import_results_tups ) + import_results_tups = import_util.create_repository_and_import_archive( trans.app, + trans.request.host, + trans.user, + repository_status_info_dict, + import_results_tups ) + import_util.check_status_and_reset_downloadable( trans.app, import_results_tups ) basic_util.remove_dir( file_path ) # NOTE: the order of installation is defined in import_results_tups, but order will be lost # when transferred to return_dict. @@ -413,15 +417,22 @@ log.debug( "Resetting metadata on repository %s" % str( repository.name ) ) repository_id = trans.security.encode_id( repository.id ) try: - invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id ) + invalid_file_tups, metadata_dict = \ + metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id ) if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False ) + message = tool_util.generate_message_for_invalid_tools( trans.app, + invalid_file_tups, + repository, + None, + as_html=False ) results[ 'unsuccessful_count' ] += 1 else: - message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.user.username ) ) + message = "Successfully reset metadata on repository %s owned by %s" % \ + ( str( repository.name ), str( repository.user.username ) ) results[ 'successful_count' ] += 1 except Exception, e: - message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.user.username ), str( e ) ) + message = "Error resetting metadata on repository %s owned by %s: %s" % \ + ( str( repository.name ), str( repository.user.username ), str( e ) ) results[ 'unsuccessful_count' ] += 1 status = '%s : %s' % ( str( repository.name ), message ) results[ 'repository_status' ].append( status ) @@ -486,14 +497,21 @@ results = dict( start_time=start_time, repository_status=[] ) try: - invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, - trans.security.encode_id( repository.id ) ) + invalid_file_tups, metadata_dict = \ + metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, + trans.security.encode_id( repository.id ) ) if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False ) + message = tool_util.generate_message_for_invalid_tools( trans.app, + invalid_file_tups, + repository, + None, + as_html=False ) else: - message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.user.username ) ) + message = "Successfully reset metadata on repository %s owned by %s" % \ + ( str( repository.name ), str( repository.user.username ) ) except Exception, e: - message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.user.username ), str( e ) ) + message = "Error resetting metadata on repository %s owned by %s: %s" % \ + ( str( repository.name ), str( repository.user.username ), str( e ) ) status = '%s : %s' % 
( str( repository.name ), message ) results[ 'repository_status' ].append( status ) return results diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/admin.py --- a/lib/galaxy/webapps/tool_shed/controllers/admin.py +++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py @@ -315,11 +315,14 @@ def manage_role_associations( self, trans, **kwd ): """Manage users, groups and repositories associated with a role.""" role_id = kwd.get( 'id', None ) - role = repository_maintenance_util.get_role_by_id( trans, role_id ) + role = repository_maintenance_util.get_role_by_id( trans.app, role_id ) # We currently only have a single role associated with a repository, the repository admin role. repository_role_association = role.repositories[ 0 ] repository = repository_role_association.repository - associations_dict = repository_maintenance_util.handle_role_associations( trans, role, repository, **kwd ) + associations_dict = repository_maintenance_util.handle_role_associations( trans.app, + role, + repository, + **kwd ) in_users = associations_dict.get( 'in_users', [] ) out_users = associations_dict.get( 'out_users', [] ) in_groups = associations_dict.get( 'in_groups', [] ) diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/hg.py --- a/lib/galaxy/webapps/tool_shed/controllers/hg.py +++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py @@ -45,7 +45,7 @@ repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) ) update_repository( repo, ctx_rev=None ) # Set metadata using the repository files on disk. - error_message, status = set_repository_metadata( trans, repository ) + error_message, status = set_repository_metadata( trans.app, trans.request.host, trans.user, repository ) if status == 'ok' and error_message: log.debug( "Successfully reset metadata on repository %s owned by %s, but encountered problem: %s" % \ ( str( repository.name ), str( repository.user.username ), error_message ) ) diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -41,7 +41,6 @@ from galaxy import eggs eggs.require( 'mercurial' ) -from mercurial import commands from mercurial import mdiff from mercurial import patch @@ -849,7 +848,7 @@ operation = kwd[ 'operation' ].lower() if operation == "preview_tools_in_changeset": repository = suc.get_repository_in_tool_shed( trans.app, repository_id ) - repository_metadata = metadata_util.get_latest_repository_metadata( trans, repository.id, downloadable=True ) + repository_metadata = metadata_util.get_latest_repository_metadata( trans.app, repository.id, downloadable=True ) latest_installable_changeset_revision = repository_metadata.changeset_revision return trans.response.send_redirect( web.url_for( controller='repository', action='preview_tools_in_changeset', @@ -1017,7 +1016,7 @@ if error: status = 'error' else: - repository, message = repository_maintenance_util.create_repository( trans, + repository, message = repository_maintenance_util.create_repository( trans.app, name, repository_type, description, @@ -1974,7 +1973,10 @@ # The manifest.xml file has already been validated, so no error_message should be returned here. 
repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path ) # Determine the status for each exported repository archive contained within the capsule. - repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts ) + repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans.app, + trans.user, + trans.user_is_admin(), + repository_info_dicts ) if 'import_capsule_button' in kwd: # Generate a list of repository name / import results message tuples for display after the capsule is imported. import_results_tups = [] @@ -1984,11 +1986,12 @@ # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict. repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path - import_results_tups = \ - repository_maintenance_util.create_repository_and_import_archive( trans, - repository_status_info_dict, - import_results_tups ) - import_util.check_status_and_reset_downloadable( trans, import_results_tups ) + import_results_tups = import_util.create_repository_and_import_archive( trans.app, + trans.request.host, + trans.user, + repository_status_info_dict, + import_results_tups ) + import_util.check_status_and_reset_downloadable( trans.app, import_results_tups ) basic_util.remove_dir( file_path ) return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako', export_info_dict=export_info_dict, @@ -2099,7 +2102,12 @@ tool, [] ) if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True ) + message = tool_util.generate_message_for_invalid_tools( trans.app, + invalid_file_tups, + repository, + {}, + as_html=True, + displaying_invalid_tool=True ) elif error_message: message = error_message try: @@ -2459,7 +2467,10 @@ if repository_metadata: metadata = repository_metadata.metadata role = repository.admin_role - associations_dict = repository_maintenance_util.handle_role_associations( trans, role, repository, **kwd ) + associations_dict = repository_maintenance_util.handle_role_associations( trans.app, + role, + repository, + **kwd ) in_users = associations_dict.get( 'in_users', [] ) out_users = associations_dict.get( 'out_users', [] ) in_groups = associations_dict.get( 'in_groups', [] ) @@ -2693,7 +2704,7 @@ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd ) if invalid_file_tups: repository = suc.get_repository_in_tool_shed( trans.app, id ) - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, metadata_dict ) status = 'error' else: message = "All repository metadata has been reset. " @@ -2733,7 +2744,7 @@ tip = repository.tip( trans.app ) for selected_file in selected_files_to_delete: try: - commands.remove( repo.ui, repo, selected_file, force=True ) + hg_util.remove_file( repo.ui, repo, selected_file, force=True ) except Exception, e: log.debug( "Error removing the following file using the mercurial API:\n %s" % str( selected_file ) ) log.debug( "The error was: %s" % str( e )) @@ -2759,19 +2770,27 @@ # Commit the change set. 
if not commit_message: commit_message = 'Deleted selected files' - commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message ) - suc.handle_email_alerts( trans, repository ) + hg_util.commit_changeset( repo.ui, + repo, + full_path_to_changeset=repo_dir, + username=trans.user.username, + message=commit_message ) + suc.handle_email_alerts( trans.app, trans.request.host, repository ) # Update the repository files for browsing. hg_util.update_repository( repo ) # Get the new repository tip. if tip == repository.tip( trans.app ): message += 'No changes to repository. ' - kwd[ 'message' ] = message - else: - message += 'The selected files were deleted from the repository. ' - kwd[ 'message' ] = message - metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, **kwd ) + status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( trans.app, + trans.request.host, + trans.user, + repository, + **kwd ) + if error_message: + message = error_message + else: + message += 'The selected files were deleted from the repository. ' else: message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>." status = "error" @@ -3061,13 +3080,13 @@ status = kwd.get( 'status', 'done' ) url = kwd.get( 'url', '' ) if 'upload_capsule_button' in kwd: - capsule_dict = import_util.upload_capsule( trans, **kwd ) + capsule_dict = import_util.upload_capsule( **kwd ) status = capsule_dict.get( 'status', 'error' ) if status == 'error': message = capsule_dict.get( 'error_message', '' ) else: - capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict ) - capsule_dict = import_util.validate_capsule( trans, **capsule_dict ) + capsule_dict = import_util.extract_capsule_files( **capsule_dict ) + capsule_dict = import_util.validate_capsule( **capsule_dict ) status = capsule_dict.get( 'status', 'error' ) if status == 'ok': return trans.response.send_redirect( web.url_for( controller='repository', diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -167,7 +167,7 @@ # Inspect the contents of the file to see if changeset_revision values are missing and if so, # set them appropriately. altered, root_elem, error_message = \ - commit_util.handle_repository_dependencies_definition( trans, + commit_util.handle_repository_dependencies_definition( trans.app, uploaded_file_name, unpopulate=False ) if error_message: @@ -184,7 +184,7 @@ # Inspect the contents of the file to see if changeset_revision values are # missing and if so, set them appropriately. altered, root_elem, error_message = \ - commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name ) + commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name ) if error_message: ok = False message = error_message @@ -200,15 +200,19 @@ if ok: # See if any admin users have chosen to receive email alerts when a repository is updated. # If so, check every uploaded file to ensure content is appropriate. 
- check_contents = commit_util.check_file_contents_for_email_alerts( trans ) + check_contents = commit_util.check_file_contents_for_email_alerts( trans.app ) if check_contents and os.path.isfile( full_path ): content_alert_str = commit_util.check_file_content_for_html_and_images( full_path ) else: content_alert_str = '' - commands.add( repo.ui, repo, full_path ) + hg_util.add_changeset( repo.ui, repo, full_path ) # Convert from unicode to prevent "TypeError: array item must be char" full_path = full_path.encode( 'ascii', 'replace' ) - commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) + hg_util.commit_changeset( repo.ui, + repo, + full_path_to_changeset=full_path, + username=trans.user.username, + message=commit_message ) if full_path.endswith( 'tool_data_table_conf.xml.sample' ): # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables @@ -218,7 +222,8 @@ message = '%s<br/>%s' % ( message, error_message ) # See if the content of the change set was valid. admin_only = len( repository.downloadable_revisions ) != 1 - suc.handle_email_alerts( trans, + suc.handle_email_alerts( trans.app, + trans.request.host, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, @@ -253,8 +258,15 @@ ( len( files_to_remove ), upload_point ) else: message += " %d files were removed from the repository root. " % len( files_to_remove ) + status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( trans.app, + trans.request.host, + trans.user, + repository, + content_alert_str=content_alert_str, + **kwd ) + if error_message: + message = error_message kwd[ 'message' ] = message - metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd ) if repository.metadata_revisions: # A repository's metadata revisions are order descending by update_time, so the zeroth revision # will be the tip just after an upload. @@ -355,7 +367,7 @@ # Inspect the contents of the file to see if changeset_revision values are missing and # if so, set them appropriately. altered, root_elem, error_message = \ - commit_util.handle_repository_dependencies_definition( trans, + commit_util.handle_repository_dependencies_definition( trans.app, uploaded_file_name, unpopulate=False ) if error_message: @@ -365,7 +377,7 @@ shutil.move( tmp_filename, uploaded_file_name ) elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. 
- altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name ) if error_message: return False, error_message, [], '', [], [] if altered: @@ -382,8 +394,17 @@ os.remove( repo_path ) shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path ) filenames_in_archive.append( relative_path ) - return commit_util.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, - new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ) + return commit_util.handle_directory_changes( trans.app, + trans.request.host, + trans.user.username, + repository, + full_path, + filenames_in_archive, + remove_repo_files_not_in_tar, + new_repo_alert, + commit_message, + undesirable_dirs_removed, + undesirable_files_removed ) def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ): # Upload a tar archive of files. @@ -422,7 +443,7 @@ uploaded_file_name = os.path.join( full_path, filename ) if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. - altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans, + altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans.app, uploaded_file_name, unpopulate=False ) if error_message: @@ -432,13 +453,15 @@ shutil.move( tmp_filename, uploaded_file_name ) elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. 
- altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name ) if error_message: return False, error_message, [], '', [], [] if altered: tmp_filename = xml_util.create_and_write_tmp_file( root_elem ) shutil.move( tmp_filename, uploaded_file_name ) - return commit_util.handle_directory_changes( trans, + return commit_util.handle_directory_changes( trans.app, + trans.request.host, + trans.user.username, repository, full_path, filenames_in_archive, diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/framework/middleware/hg.py --- a/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py +++ b/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py @@ -1,6 +1,7 @@ """Middle-ware for handling hg authentication for users pushing change sets to local repositories.""" +import json +import logging import os -import logging import sqlalchemy import sys import tempfile @@ -9,11 +10,9 @@ from paste.httpheaders import REMOTE_USER from galaxy.util import asbool -from galaxy.util import json -from galaxy.webapps.tool_shed import model from galaxy.util.hash_util import new_secure_hash -import tool_shed.util.shed_util_common as suc from tool_shed.util import commit_util +from tool_shed.util import hg_util import tool_shed.repository_types.util as rt_util from galaxy import eggs @@ -104,7 +103,7 @@ fh.write( chunk ) fh.close() fh = open( tmp_filename, 'rb' ) - changeset_groups = json.from_json_string( commit_util.bundle_to_json( fh ) ) + changeset_groups = json.loads( hg_util.bundle_to_json( fh ) ) fh.close() try: os.unlink( tmp_filename ) diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -1,14 +1,11 @@ -import cStringIO import gzip +import json import logging import os -import pkg_resources import shutil -import struct import tempfile from galaxy.datatypes import checkers from galaxy.util import asbool -from galaxy.util import json from galaxy.util.odict import odict from galaxy.web import url_for import tool_shed.util.shed_util_common as suc @@ -18,24 +15,11 @@ from tool_shed.util import xml_util import tool_shed.repository_types.util as rt_util -from galaxy import eggs -eggs.require( 'mercurial' ) -from mercurial import commands -from mercurial.changegroup import readbundle -from mercurial.changegroup import readexactly - log = logging.getLogger( __name__ ) UNDESIRABLE_DIRS = [ '.hg', '.svn', '.git', '.cvs' ] UNDESIRABLE_FILES = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ] -def bundle_to_json( fh ): - """Convert the received HG10xx data stream (a mercurial 1.0 bundle created using hg push from the command line) to a json object.""" - # See http://www.wstein.org/home/wstein/www/home/was/patches/hg_json - hg_unbundle10_obj = readbundle( fh, None ) - groups = [ group for group in unpack_groups( hg_unbundle10_obj ) ] - return json.to_json_string( groups, indent=4 ) - def check_archive( repository, archive ): for member in archive.getmembers(): # Allow regular files and directories only @@ -62,15 +46,16 @@ return False, message return True, '' -def check_file_contents_for_email_alerts( trans ): +def check_file_contents_for_email_alerts( app ): """ See if any admin users have chosen to receive email alerts when a 
repository is updated. If so, the file contents of the update must be checked for inappropriate content. """ - admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) - for repository in trans.sa_session.query( trans.model.Repository ) \ - .filter( trans.model.Repository.table.c.email_alerts != None ): - email_alerts = json.from_json_string( repository.email_alerts ) + sa_session = app.model.context.current + admin_users = app.config.get( "admin_users", "" ).split( "," ) + for repository in sa_session.query( app.model.Repository ) \ + .filter( app.model.Repository.table.c.email_alerts != None ): + email_alerts = json.loads( repository.email_alerts ) for user_email in email_alerts: if user_email in admin_users: return True @@ -140,14 +125,14 @@ bzipped_file.close() shutil.move( uncompressed, uploaded_file_name ) -def handle_complex_repository_dependency_elem( trans, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ): +def handle_complex_repository_dependency_elem( app, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ): """ Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag that defines a complex repository dependency. """ # The received sub_elem looks something like the following: # <repository name="package_eigen_2_0" owner="test" prior_installation_required="True" /> - revised, repository_elem, error_message = handle_repository_dependency_elem( trans, sub_elem, unpopulate=unpopulate ) + revised, repository_elem, error_message = handle_repository_dependency_elem( app, sub_elem, unpopulate=unpopulate ) if error_message: error_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message if revised: @@ -157,13 +142,13 @@ altered = True return altered, sub_elem_altered, elem, error_message -def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, - commit_message, undesirable_dirs_removed, undesirable_files_removed ): - repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) +def handle_directory_changes( app, host, username, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, + new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ): + repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False ) content_alert_str = '' files_to_remove = [] filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ] - if remove_repo_files_not_in_tar and not repository.is_new( trans.app ): + if remove_repo_files_not_in_tar and not repository.is_new( app ): # We have a repository that is not new (it contains files), so discover those files that are in the # repository, but not in the uploaded archive. for root, dirs, files in os.walk( full_path ): @@ -181,9 +166,10 @@ if full_name not in filenames_in_archive: files_to_remove.append( full_name ) for repo_file in files_to_remove: - # Remove files in the repository (relative to the upload point) that are not in the uploaded archive. + # Remove files in the repository (relative to the upload point) that are not in + # the uploaded archive. 
try: - commands.remove( repo.ui, repo, repo_file, force=True ) + hg_util.remove_file( repo.ui, repo, repo_file, force=True ) except Exception, e: log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e )) relative_selected_file = repo_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' ) @@ -204,23 +190,33 @@ except OSError, e: # The directory is not empty. pass - # See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure - # content is appropriate. - check_contents = check_file_contents_for_email_alerts( trans ) + # See if any admin users have chosen to receive email alerts when a repository is updated. + # If so, check every uploaded file to ensure content is appropriate. + check_contents = check_file_contents_for_email_alerts( app ) for filename_in_archive in filenames_in_archive: # Check file content to ensure it is appropriate. if check_contents and os.path.isfile( filename_in_archive ): content_alert_str += check_file_content_for_html_and_images( filename_in_archive ) - commands.add( repo.ui, repo, filename_in_archive ) + hg_util.add_changeset( repo.ui, repo, filename_in_archive ) if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ): - # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries - # to the in-memory trans.app.tool_data_tables dictionary. - error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive ) + # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded + # by parsing the file and adding new entries to the in-memory app.tool_data_tables + # dictionary. + error, message = tool_util.handle_sample_tool_data_table_conf_file( app, filename_in_archive ) if error: return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed - commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) + hg_util.commit_changeset( repo.ui, + repo, + full_path_to_changeset=full_path, + username=username, + message=commit_message ) admin_only = len( repository.downloadable_revisions ) != 1 - suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) + suc.handle_email_alerts( app, + host, + repository, + content_alert_str=content_alert_str, + new_repo_alert=new_repo_alert, + admin_only=admin_only ) return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed def handle_missing_repository_attribute( elem ): @@ -253,7 +249,7 @@ gzipped_file.close() shutil.move( uncompressed, uploaded_file_name ) -def handle_repository_dependencies_definition( trans, repository_dependencies_config, unpopulate=False ): +def handle_repository_dependencies_definition( app, repository_dependencies_config, unpopulate=False ): """ Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag. 
Populating will occur when a dependency definition file is being uploaded to the repository, while depopulating will occur when the repository is being @@ -269,7 +265,7 @@ for index, elem in enumerate( root ): if elem.tag == 'repository': # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" /> - revised, elem, error_message = handle_repository_dependency_elem( trans, elem, unpopulate=unpopulate ) + revised, elem, error_message = handle_repository_dependency_elem( app, elem, unpopulate=unpopulate ) if error_message: error_message = 'The repository_dependencies.xml file contains an invalid <repository> tag. %s' % error_message return False, None, error_message @@ -280,7 +276,7 @@ return altered, root, error_message return False, None, error_message -def handle_repository_dependency_elem( trans, elem, unpopulate=False ): +def handle_repository_dependency_elem( app, elem, unpopulate=False ): """Populate or unpopulate repository tags.""" # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" /> # <repository changeset_revision="xxx" name="package_xorg_macros_1_17_1" owner="test" toolshed="yyy"> @@ -342,10 +338,10 @@ # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository. # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the # revision are valid. - repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + repository = suc.get_repository_by_name_and_owner( app, name, owner ) if repository: - repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) - lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans.app, repository, repo ) + repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False ) + lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo ) if lastest_installable_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH: elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision revised = True @@ -356,7 +352,7 @@ error_message = 'Unable to locate repository with name %s and owner %s. ' % ( str( name ), str( owner ) ) return revised, elem, error_message -def handle_repository_dependency_sub_elem( trans, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ): +def handle_repository_dependency_sub_elem( app, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ): """ Populate or unpopulate the toolshed and changeset_revision attributes for each of the following tag sets. <action type="set_environment_for_install"> @@ -367,7 +363,7 @@ for repo_index, repo_elem in enumerate( action_elem ): # Make sure to skip comments and tags that are not <repository>. if repo_elem.tag == 'repository': - revised, repository_elem, message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) + revised, repository_elem, message = handle_repository_dependency_elem( app, repo_elem, unpopulate=unpopulate ) if message: error_message += 'The tool_dependencies.xml file contains an invalid <repository> tag. 
%s' % message if revised: @@ -379,7 +375,7 @@ actions_elem[ action_index ] = action_elem return package_altered, altered, actions_elem, error_message -def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ): +def handle_tool_dependencies_definition( app, tool_dependencies_config, unpopulate=False ): """ Populate or unpopulate the tooshed and changeset_revision attributes of each <repository> tag defined within a tool_dependencies.xml file. @@ -401,7 +397,7 @@ if package_elem.tag == 'repository': # We have a complex repository dependency. altered, package_altered, root_elem, message = \ - handle_complex_repository_dependency_elem( trans, + handle_complex_repository_dependency_elem( app, root_elem, package_index, package_elem, @@ -440,7 +436,7 @@ if last_actions_elem_package_elem.tag == 'repository': # We have a complex repository dependency. altered, last_actions_package_altered, last_actions_elem, message = \ - handle_complex_repository_dependency_elem( trans, + handle_complex_repository_dependency_elem( app, last_actions_elem, last_actions_elem_package_index, last_actions_elem_package_elem, @@ -456,7 +452,7 @@ # Inspect the sub elements of last_actions_elem to locate all <repository> tags and # populate them with toolshed and changeset_revision attributes if necessary. last_actions_package_altered, altered, last_actions_elem, message = \ - handle_repository_dependency_sub_elem( trans, + handle_repository_dependency_sub_elem( app, last_actions_package_altered, altered, actions_group_elem, @@ -468,23 +464,25 @@ elif actions_elem.tag == 'actions': # We are not in an <actions_group> tag set, so we must be in an <actions> tag set. for action_index, action_elem in enumerate( actions_elem ): - # Inspect the sub elements of last_actions_elem to locate all <repository> tags and populate them with - # toolshed and changeset_revision attributes if necessary. - package_altered, altered, actions_elem, message = handle_repository_dependency_sub_elem( trans, - package_altered, - altered, - actions_elem, - action_index, - action_elem, - unpopulate=unpopulate ) + # Inspect the sub elements of last_actions_elem to locate all <repository> tags + # and populate them with toolshed and changeset_revision attributes if necessary. + package_altered, altered, actions_elem, message = \ + handle_repository_dependency_sub_elem( app, + package_altered, + altered, + actions_elem, + action_index, + action_elem, + unpopulate=unpopulate ) if message: error_message += message else: package_name = root_elem.get( 'name', '' ) package_version = root_elem.get( 'version', '' ) - error_message += 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) ) - error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> ' - error_message += 'tag set.' + error_message += 'Version %s of the %s package cannot be installed because ' % \ + ( str( package_version ), str( package_name ) ) + error_message += 'the recipe for installing the package is missing either an ' + error_message += '<actions> tag set or an <actions_group> tag set.' 
if package_altered: package_elem[ actions_index ] = actions_elem if package_altered: @@ -534,62 +532,3 @@ if isbz2: handle_bz2( repository, uploaded_file_name ) return uploaded_file_filename.rstrip( '.bz2' ) - -def unpack_chunks( hg_unbundle10_obj ): - """ - This method provides a generator of parsed chunks of a "group" in a mercurial unbundle10 object which - is created when a changeset that is pushed to a Tool Shed repository using hg push from the command line - is read using readbundle. - """ - while True: - length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) ) - if length <= 4: - # We found a "null chunk", which ends the group. - break - if length < 84: - raise Exception( "negative data length" ) - node, p1, p2, cs = struct.unpack( '20s20s20s20s', readexactly( hg_unbundle10_obj, 80 ) ) - yield { 'node': node.encode( 'hex' ), - 'p1': p1.encode( 'hex' ), - 'p2': p2.encode( 'hex' ), - 'cs': cs.encode( 'hex' ), - 'data': [ patch for patch in unpack_patches( hg_unbundle10_obj, length - 84 ) ] } - -def unpack_groups( hg_unbundle10_obj ): - """ - This method provides a generator of parsed groups from a mercurial unbundle10 object which is - created when a changeset that is pushed to a Tool Shed repository using hg push from the command - line is read using readbundle. - """ - # Process the changelog group. - yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] - # Process the manifest group. - yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] - while True: - length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) ) - if length <= 4: - # We found a "null meta chunk", which ends the changegroup. - break - filename = readexactly( hg_unbundle10_obj, length-4 ).encode( 'string_escape' ) - # Process the file group. - yield ( filename, [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] ) - -def unpack_patches( hg_unbundle10_obj, remaining ): - """ - This method provides a generator of patches from the data field in a chunk. As there is no delimiter - for this data field, a length argument is required. - """ - while remaining >= 12: - start, end, blocklen = struct.unpack( '>lll', readexactly( hg_unbundle10_obj, 12 ) ) - remaining -= 12 - if blocklen > remaining: - raise Exception( "unexpected end of patch stream" ) - block = readexactly( hg_unbundle10_obj, blocklen ) - remaining -= blocklen - yield { 'start': start, - 'end': end, - 'blocklen': blocklen, - 'block': block.encode( 'string_escape' ) } - if remaining > 0: - print remaining - raise Exception( "unexpected end of patch stream" ) diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/export_util.py --- a/lib/tool_shed/util/export_util.py +++ b/lib/tool_shed/util/export_util.py @@ -151,7 +151,7 @@ if name == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: # Eliminate the toolshed, and changeset_revision attributes from all <repository> tags. altered, root_elem, error_message = \ - commit_util.handle_repository_dependencies_definition( trans, full_path, unpopulate=True ) + commit_util.handle_repository_dependencies_definition( trans.app, full_path, unpopulate=True ) if error_message: return None, error_message if altered: @@ -160,7 +160,7 @@ elif name == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: # Eliminate the toolshed, and changeset_revision attributes from all <repository> tags. 
altered, root_elem, error_message = \ - commit_util.handle_tool_dependencies_definition( trans, full_path, unpopulate=True ) + commit_util.handle_tool_dependencies_definition( trans.app, full_path, unpopulate=True ) if error_message: return None, error_message if altered: diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/hg_util.py --- a/lib/tool_shed/util/hg_util.py +++ b/lib/tool_shed/util/hg_util.py @@ -1,5 +1,7 @@ +import json import logging import os +import struct from datetime import datetime from time import gmtime @@ -14,6 +16,8 @@ from mercurial import commands from mercurial import hg from mercurial import ui +from mercurial.changegroup import readbundle +from mercurial.changegroup import readexactly from tool_shed.util import basic_util @@ -21,6 +25,19 @@ INITIAL_CHANGELOG_HASH = '000000000000' +def add_changeset( repo_ui, repo, path_to_filename_in_archive ): + commands.add( repo_ui, repo, path_to_filename_in_archive ) + +def bundle_to_json( fh ): + """ + Convert the received HG10xx data stream (a mercurial 1.0 bundle created using hg push from the + command line) to a json object. + """ + # See http://www.wstein.org/home/wstein/www/home/was/patches/hg_json + hg_unbundle10_obj = readbundle( fh, None ) + groups = [ group for group in unpack_groups( hg_unbundle10_obj ) ] + return json.dumps( groups, indent=4 ) + def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ): """ Clone the repository up to the specified changeset_revision. No subsequent revisions will be @@ -39,6 +56,9 @@ log.debug( error_message ) return False, error_message +def commit_changeset( repo_ui, repo, full_path_to_changeset, username, message ): + commands.commit( repo_ui, repo, full_path_to_changeset, user=username, message=message ) + def copy_file_from_manifest( repo, ctx, filename, dir ): """ Copy the latest version of the file named filename from the repository manifest to the directory @@ -268,6 +288,9 @@ """Pull changes from a remote repository to a local one.""" commands.pull( get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] ) +def remove_file( repo_ui, repo, selected_file, force=True ): + commands.remove( repo_ui, repo, selected_file, force=force ) + def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ): """ Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, @@ -300,6 +323,65 @@ """ return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision ) +def unpack_chunks( hg_unbundle10_obj ): + """ + This method provides a generator of parsed chunks of a "group" in a mercurial unbundle10 object which + is created when a changeset that is pushed to a Tool Shed repository using hg push from the command line + is read using readbundle. + """ + while True: + length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) ) + if length <= 4: + # We found a "null chunk", which ends the group. 
+ break + if length < 84: + raise Exception( "negative data length" ) + node, p1, p2, cs = struct.unpack( '20s20s20s20s', readexactly( hg_unbundle10_obj, 80 ) ) + yield { 'node': node.encode( 'hex' ), + 'p1': p1.encode( 'hex' ), + 'p2': p2.encode( 'hex' ), + 'cs': cs.encode( 'hex' ), + 'data': [ patch for patch in unpack_patches( hg_unbundle10_obj, length - 84 ) ] } + +def unpack_groups( hg_unbundle10_obj ): + """ + This method provides a generator of parsed groups from a mercurial unbundle10 object which is + created when a changeset that is pushed to a Tool Shed repository using hg push from the command + line is read using readbundle. + """ + # Process the changelog group. + yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] + # Process the manifest group. + yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] + while True: + length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) ) + if length <= 4: + # We found a "null meta chunk", which ends the changegroup. + break + filename = readexactly( hg_unbundle10_obj, length-4 ).encode( 'string_escape' ) + # Process the file group. + yield ( filename, [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] ) + +def unpack_patches( hg_unbundle10_obj, remaining ): + """ + This method provides a generator of patches from the data field in a chunk. As there is no delimiter + for this data field, a length argument is required. + """ + while remaining >= 12: + start, end, blocklen = struct.unpack( '>lll', readexactly( hg_unbundle10_obj, 12 ) ) + remaining -= 12 + if blocklen > remaining: + raise Exception( "unexpected end of patch stream" ) + block = readexactly( hg_unbundle10_obj, blocklen ) + remaining -= blocklen + yield { 'start': start, + 'end': end, + 'blocklen': blocklen, + 'block': block.encode( 'string_escape' ) } + if remaining > 0: + print remaining + raise Exception( "unexpected end of patch stream" ) + def update_repository( repo, ctx_rev=None ): """ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/import_util.py --- a/lib/tool_shed/util/import_util.py +++ b/lib/tool_shed/util/import_util.py @@ -5,50 +5,117 @@ import tempfile import urllib from galaxy import util -from galaxy.datatypes import checkers from tool_shed.util import commit_util from tool_shed.util import encoding_util from tool_shed.util import hg_util from tool_shed.util import metadata_util +from tool_shed.util import repository_maintenance_util from tool_shed.util import xml_util import tool_shed.util.shed_util_common as suc import tool_shed.repository_types.util as rt_util log = logging.getLogger( __name__ ) -def check_status_and_reset_downloadable( trans, import_results_tups ): +def check_status_and_reset_downloadable( app, import_results_tups ): """Check the status of each imported repository and set downloadable to False if errors.""" + sa_session = app.model.context.current flush = False for import_results_tup in import_results_tups: ok, name_owner, message = import_results_tup name, owner = name_owner if not ok: - repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + repository = suc.get_repository_by_name_and_owner( app, name, owner ) if repository is not None: # Do not allow the repository to be automatically installed if population resulted in errors. 
- tip_changeset_revision = repository.tip( trans.app ) - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, - trans.security.encode_id( repository.id ), + tip_changeset_revision = repository.tip( app ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, + app.security.encode_id( repository.id ), tip_changeset_revision ) if repository_metadata: if repository_metadata.downloadable: repository_metadata.downloadable = False - trans.sa_session.add( repository_metadata ) + sa_session.add( repository_metadata ) if not flush: flush = True # Do not allow dependent repository revisions to be automatically installed if population # resulted in errors. - dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( trans.app, repository_metadata ) + dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( app, repository_metadata ) for dependent_downloadable_revision in dependent_downloadable_revisions: if dependent_downloadable_revision.downloadable: dependent_downloadable_revision.downloadable = False - trans.sa_session.add( dependent_downloadable_revision ) + sa_session.add( dependent_downloadable_revision ) if not flush: flush = True if flush: - trans.sa_session.flush() + sa_session.flush() -def extract_capsule_files( trans, **kwd ): +def create_repository_and_import_archive( app, host, user, repository_archive_dict, import_results_tups ): + """ + Create a new repository in the tool shed and populate it with the contents of a gzip compressed + tar archive that was exported as part or all of the contents of a capsule. + """ + results_message = '' + name = repository_archive_dict.get( 'name', None ) + username = repository_archive_dict.get( 'owner', None ) + if name is None or username is None: + ok = False + results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % \ + ( str( name ), str( username )) + import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) ) + else: + status = repository_archive_dict.get( 'status', None ) + if status is None: + # The repository does not yet exist in this Tool Shed and the current user is authorized to import + # the current archive file. + type = repository_archive_dict.get( 'type', 'unrestricted' ) + description = repository_archive_dict.get( 'description', '' ) + long_description = repository_archive_dict.get( 'long_description', '' ) + # The owner entry in the repository_archive_dict is the public username of the user associated with + # the exported repository archive. + user = suc.get_user_by_username( app, username ) + if user is None: + ok = False + results_message += 'Import failed: repository owner <b>%s</b> does not have an account in this Tool Shed.' % str( username ) + import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) ) + else: + user_id = user.id + # The categories entry in the repository_archive_dict is a list of category names. If a name does not + # exist in the current Tool Shed, the category will not be created, so it will not be associated with + # the repository. 
+ category_ids = [] + category_names = repository_archive_dict.get( 'category_names', [] ) + for category_name in category_names: + category = suc.get_category_by_name( app, category_name ) + if category is None: + results_message += 'This Tool Shed does not have the category <b>%s</b> so it ' % str( category_name ) + results_message += 'will not be associated with this repository.' + else: + category_ids.append( app.security.encode_id( category.id ) ) + # Create the repository record in the database. + repository, create_message = repository_maintenance_util.create_repository( app, + name, + type, + description, + long_description, + user_id=user_id, + category_ids=category_ids ) + if create_message: + results_message += create_message + # Populate the new repository with the contents of exported repository archive. + results_dict = import_repository_archive( app, host, user, repository, repository_archive_dict ) + ok = results_dict.get( 'ok', False ) + error_message = results_dict.get( 'error_message', '' ) + if error_message: + results_message += error_message + import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) ) + else: + # The repository either already exists in this Tool Shed or the current user is not authorized to create it. + ok = True + results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % str( status ) + import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) ) + return import_results_tups + +def extract_capsule_files( **kwd ): """Extract the uploaded capsule archive into a temporary location for inspection, validation and potential import.""" return_dict = {} tar_archive = kwd.get( 'tar_archive', None ) @@ -168,7 +235,7 @@ repository_info_dicts.append( repository_info_dict ) return repository_info_dicts, error_message -def get_repository_status_from_tool_shed( trans, repository_info_dicts ): +def get_repository_status_from_tool_shed( app, user, user_is_admin, repository_info_dicts ): """ For each exported repository archive contained in the capsule, inspect the Tool Shed to see if that repository already exists or if the current user is authorized to create the repository, and set a status appropriately. If repository @@ -179,7 +246,7 @@ """ repository_status_info_dicts = [] for repository_info_dict in repository_info_dicts: - repository = suc.get_repository_by_name_and_owner( trans.app, repository_info_dict[ 'name' ], repository_info_dict[ 'owner' ] ) + repository = suc.get_repository_by_name_and_owner( app, repository_info_dict[ 'name' ], repository_info_dict[ 'owner' ] ) if repository: if repository.deleted: repository_info_dict[ 'status' ] = 'Exists, deleted' @@ -189,16 +256,16 @@ repository_info_dict[ 'status' ] = 'Exists' else: # No repository with the specified name and owner currently exists, so make sure the current user can create one. 
- if trans.user_is_admin(): + if user_is_admin: repository_info_dict[ 'status' ] = None - elif trans.app.security_agent.user_can_import_repository_archive( trans.user, repository_info_dict[ 'owner' ] ): + elif app.security_agent.user_can_import_repository_archive( user, repository_info_dict[ 'owner' ] ): repository_info_dict[ 'status' ] = None else: repository_info_dict[ 'status' ] = 'Not authorized to import' repository_status_info_dicts.append( repository_info_dict ) return repository_status_info_dicts -def import_repository_archive( trans, repository, repository_archive_dict ): +def import_repository_archive( app, host, user, repository, repository_archive_dict ): """Import a repository archive contained within a repository capsule.""" archive_file_name = repository_archive_dict.get( 'archive_file_name', None ) capsule_file_name = repository_archive_dict[ 'capsule_file_name' ] @@ -207,8 +274,8 @@ results_dict = dict( ok=True, error_message='' ) archive_file_path = os.path.join( file_path, archive_file_name ) archive = tarfile.open( archive_file_path, 'r:*' ) - repo_dir = repository.repo_path( trans.app ) - repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False ) + repo_dir = repository.repo_path( app ) + repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repo_dir, create=False ) undesirable_dirs_removed = 0 undesirable_files_removed = 0 ok, error_message = commit_util.check_archive( repository, archive ) @@ -237,9 +304,8 @@ uploaded_file_name = os.path.join( full_path, filename ) if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. - altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans, - uploaded_file_name, - unpopulate=False ) + altered, root_elem, error_message = \ + commit_util.handle_repository_dependencies_definition( app, uploaded_file_name, unpopulate=False ) if error_message: results_dict[ 'ok' ] = False results_dict[ 'error_message' ] += error_message @@ -248,7 +314,7 @@ shutil.move( tmp_filename, uploaded_file_name ) elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately. - altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name ) + altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( app, uploaded_file_name ) if error_message: results_dict[ 'ok' ] = False results_dict[ 'error_message' ] += error_message @@ -261,7 +327,9 @@ # Since the repository is new, the following must be False. 
remove_repo_files_not_in_tar = False ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \ - commit_util.handle_directory_changes( trans, + commit_util.handle_directory_changes( app, + host, + user.username, repository, full_path, filenames_in_archive, @@ -274,9 +342,14 @@ results_dict[ 'ok' ] = False results_dict[ 'error_message' ] += error_message try: - metadata_util.set_repository_metadata_due_to_new_tip( trans, - repository, - content_alert_str=content_alert_str ) + status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( app, + host, + user, + repository, + content_alert_str=content_alert_str ) + if error_message: + results_dict[ 'ok' ] = False + results_dict[ 'error_message' ] += error_message except Exception, e: log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \ ( str( repository.name ), str( archive_file_name ), str( e ) ) ) @@ -286,7 +359,7 @@ results_dict[ 'error_message' ] += error_message return results_dict -def upload_capsule( trans, **kwd ): +def upload_capsule( **kwd ): """Upload and prepare an exported repository capsule for validation.""" file_data = kwd.get( 'file_data', '' ) url = kwd.get( 'url', '' ) @@ -349,7 +422,7 @@ return return_dict return return_dict -def validate_capsule( trans, **kwd ): +def validate_capsule( **kwd ): """Inspect the uploaded capsule's manifest and its contained files to ensure it is a valid repository capsule.""" capsule_dict = {} capsule_dict.update( kwd ) diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -40,13 +40,14 @@ rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ] -def add_tool_versions( trans, id, repository_metadata, changeset_revisions ): +def add_tool_versions( app, id, repository_metadata, changeset_revisions ): # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. + sa_session = app.model.context.current metadata = repository_metadata.metadata tool_versions_dict = {} for tool_dict in metadata.get( 'tools', [] ): # We have at least 2 changeset revisions to compare tool guids and tool ids. - parent_id = get_parent_id( trans.app, + parent_id = get_parent_id( app, id, tool_dict[ 'id' ], tool_dict[ 'version' ], @@ -55,8 +56,8 @@ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id if tool_versions_dict: repository_metadata.tool_versions = tool_versions_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() + sa_session.add( repository_metadata ) + sa_session.flush() def clean_repository_metadata( trans, id, changeset_revisions ): # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. @@ -72,10 +73,15 @@ trans.sa_session.delete( repository_metadata ) trans.sa_session.flush() -def compare_changeset_revisions( trans, ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): - """Compare the contents of two changeset revisions to determine if a new repository metadata revision should be created.""" - # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of - # current_changeset_revision which is associated with current_metadata_dict. 
A new repository_metadata record will be created only +def compare_changeset_revisions( app, ancestor_changeset_revision, ancestor_metadata_dict, + current_changeset_revision, current_metadata_dict ): + """ + Compare the contents of two changeset revisions to determine if a new repository + metadata revision should be created. + """ + # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. + # This changeset_revision is an ancestor of current_changeset_revision which is associated + # with current_metadata_dict. A new repository_metadata record will be created only # when this method returns the constant value NOT_EQUAL_AND_NOT_SUBSET. ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) @@ -107,14 +113,16 @@ no_data_manager = not ancestor_data_manager and not current_data_manager if no_datatypes and no_readme_files and no_repository_dependencies and no_tool_dependencies and no_tools and no_workflows and no_data_manager: return NO_METADATA - # Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the - # compare_readme_files() method. + # Uncomment the following if we decide that README files should affect how installable + # repository revisions are defined. See the NOTE in the compare_readme_files() method. # readme_file_comparision = compare_readme_files( ancestor_readme_files, current_readme_files ) - repository_dependency_comparison = compare_repository_dependencies( trans, ancestor_repository_dependencies, current_repository_dependencies ) - tool_dependency_comparison = compare_tool_dependencies( trans, ancestor_tool_dependencies, current_tool_dependencies ) - workflow_comparison = compare_workflows( trans, ancestor_workflows, current_workflows ) - datatype_comparison = compare_datatypes( trans, ancestor_datatypes, current_datatypes ) - data_manager_comparison = compare_data_manager( trans, ancestor_data_manager, current_data_manager ) + repository_dependency_comparison = compare_repository_dependencies( app, + ancestor_repository_dependencies, + current_repository_dependencies ) + tool_dependency_comparison = compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ) + workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) + datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) + data_manager_comparison = compare_data_manager( ancestor_data_manager, current_data_manager ) # Handle case where all metadata is the same. 
if ancestor_guids == current_guids and \ repository_dependency_comparison == EQUAL and \ @@ -130,7 +138,11 @@ workflow_dependency_is_subset = workflow_comparison in SUBSET_VALUES datatype_is_subset = datatype_comparison in SUBSET_VALUES datamanager_is_subset = data_manager_comparison in SUBSET_VALUES - if repository_dependency_is_subset and tool_dependency_is_subset and workflow_dependency_is_subset and datatype_is_subset and datamanager_is_subset: + if repository_dependency_is_subset and \ + tool_dependency_is_subset and \ + workflow_dependency_is_subset and \ + datatype_is_subset and \ + datamanager_is_subset: is_subset = True for guid in ancestor_guids: if guid not in current_guids: @@ -140,11 +152,16 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def compare_data_manager( trans, ancestor_metadata, current_metadata ): +def compare_data_manager( ancestor_metadata, current_metadata ): """Determine if ancestor_metadata is the same as or a subset of current_metadata for data_managers.""" def __data_manager_dict_to_tuple_list( metadata_dict ): # we do not check tool_guid or tool conf file name - return set( sorted( [ ( name, tuple( sorted( value.get( 'data_tables', [] ) ) ), value.get( 'guid' ), value.get( 'version' ), value.get( 'name' ), value.get( 'id' ) ) for name, value in metadata_dict.iteritems() ] ) ) + return set( sorted( [ ( name, + tuple( sorted( value.get( 'data_tables', [] ) ) ), + value.get( 'guid' ), + value.get( 'version' ), + value.get( 'name' ), + value.get( 'id' ) ) for name, value in metadata_dict.iteritems() ] ) ) # only compare valid entries, any invalid entries are ignored ancestor_metadata = __data_manager_dict_to_tuple_list( ancestor_metadata.get( 'data_managers', {} ) ) current_metadata = __data_manager_dict_to_tuple_list( current_metadata.get( 'data_managers', {} ) ) @@ -155,7 +172,7 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def compare_datatypes( trans, ancestor_datatypes, current_datatypes ): +def compare_datatypes( ancestor_datatypes, current_datatypes ): """Determine if ancestor_datatypes is the same as or a subset of current_datatypes.""" # Each datatype dict looks something like: {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} if len( ancestor_datatypes ) <= len( current_datatypes ): @@ -199,8 +216,10 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def compare_repository_dependencies( trans, ancestor_repository_dependencies, current_repository_dependencies ): - """Determine if ancestor_repository_dependencies is the same as or a subset of current_repository_dependencies.""" +def compare_repository_dependencies( app, ancestor_repository_dependencies, current_repository_dependencies ): + """ + Determine if ancestor_repository_dependencies is the same as or a subset of current_repository_dependencies. + """ # The list of repository_dependencies looks something like: # [["http://localhost:9009", "emboss_datatypes", "test", "ab03a2a5f407", "False", "False"]]. # Create a string from each tuple in the list for easier comparison. @@ -223,9 +242,12 @@ found_in_current = True break if not found_in_current: - # In some cases, the only difference between a dependency definition in the lists is the changeset_revision value. We'll - # check to see if this is the case, and if the defined dependency is a repository that has metadata set only on its tip. 
- if not different_revision_defines_tip_only_repository_dependency( trans, ancestor_tup, current_repository_dependencies ): + # In some cases, the only difference between a dependency definition in the lists + # is the changeset_revision value. We'll check to see if this is the case, and if + # the defined dependency is a repository that has metadata set only on its tip. + if not different_revision_defines_tip_only_repository_dependency( app, + ancestor_tup, + current_repository_dependencies ): return NOT_EQUAL_AND_NOT_SUBSET return SUBSET if len( ancestor_repository_dependencies ) == len( current_repository_dependencies ): @@ -234,16 +256,17 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def compare_tool_dependencies( trans, ancestor_tool_dependencies, current_tool_dependencies ): +def compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ): """Determine if ancestor_tool_dependencies is the same as or a subset of current_tool_dependencies.""" # The tool_dependencies dictionary looks something like: # {'bwa/0.5.9': {'readme': 'some string', 'version': '0.5.9', 'type': 'package', 'name': 'bwa'}} if len( ancestor_tool_dependencies ) <= len( current_tool_dependencies ): for ancestor_td_key, ancestor_requirements_dict in ancestor_tool_dependencies.items(): if ancestor_td_key in current_tool_dependencies: - # The only values that could have changed between the 2 dictionaries are the "readme" or "type" values. Changing the readme value - # makes no difference. Changing the type will change the installation process, but for now we'll assume it was a typo, so new metadata - # shouldn't be generated. + # The only values that could have changed between the 2 dictionaries are the + # "readme" or "type" values. Changing the readme value makes no difference. + # Changing the type will change the installation process, but for now we'll + # assume it was a typo, so new metadata shouldn't be generated. continue else: return NOT_EQUAL_AND_NOT_SUBSET @@ -254,8 +277,11 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def compare_workflows( trans, ancestor_workflows, current_workflows ): - """Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows is a subset of current_workflows.""" +def compare_workflows( ancestor_workflows, current_workflows ): + """ + Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows + is a subset of current_workflows. + """ if len( ancestor_workflows ) <= len( current_workflows ): for ancestor_workflow_tup in ancestor_workflows: # ancestor_workflows is a list of tuples where each contained tuple is @@ -267,8 +293,10 @@ found_in_current = False for current_workflow_tup in current_workflows: current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are equal, then the workflows are the same. Of course, this may not be true... - if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: + # Assume that if the name and number of steps are equal, then the workflows + # are the same. Of course, this may not be true...
+ if current_workflow_dict[ 'name' ] == ancestor_workflow_name and \ + len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: found_in_current = True break if not found_in_current: @@ -279,8 +307,9 @@ return SUBSET return NOT_EQUAL_AND_NOT_SUBSET -def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): +def create_or_update_repository_metadata( app, id, repository, changeset_revision, metadata_dict ): """Create or update a repository_metadata record in the tool shed.""" + sa_session = app.model.context.current has_repository_dependencies = False has_repository_dependencies_only_if_compiling_contained_td = False includes_datatypes = False @@ -309,7 +338,7 @@ downloadable = True else: downloadable = False - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, id, changeset_revision ) if repository_metadata: # A repository metadata record already exists with the received changeset_revision, so we don't need to # check the skip_tool_test table. @@ -325,15 +354,15 @@ # No repository_metadata record exists for the received changeset_revision, so we may need to update the # skip_tool_test table. check_skip_tool_test = True - repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id, - changeset_revision=changeset_revision, - metadata=metadata_dict, - downloadable=downloadable, - has_repository_dependencies=has_repository_dependencies, - includes_datatypes=includes_datatypes, - includes_tools=includes_tools, - includes_tool_dependencies=includes_tool_dependencies, - includes_workflows=includes_workflows ) + repository_metadata = app.model.RepositoryMetadata( repository_id=repository.id, + changeset_revision=changeset_revision, + metadata=metadata_dict, + downloadable=downloadable, + has_repository_dependencies=has_repository_dependencies, + includes_datatypes=includes_datatypes, + includes_tools=includes_tools, + includes_tool_dependencies=includes_tool_dependencies, + includes_workflows=includes_workflows ) # Always set the default values for the following columns. When resetting all metadata on a repository # this will reset the values. repository_metadata.tools_functionally_correct = False @@ -342,20 +371,20 @@ repository_metadata.do_not_test = False repository_metadata.time_last_tested = None repository_metadata.tool_test_results = None - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() + sa_session.add( repository_metadata ) + sa_session.flush() if check_skip_tool_test: # Since we created a new repository_metadata record, we may need to update the skip_tool_test table to point to it. Inspect each # changeset revision in the received repository's changelog (up to the received changeset revision) to see if it is contained in the # skip_tool_test table. If it is, but is not associated with a repository_metadata record, reset that skip_tool_test record to the # newly created repository_metadata record.
- repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) + repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False ) for changeset in repo.changelog: changeset_hash = str( repo.changectx( changeset ) ) - skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( trans.app, changeset_hash ) + skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( app, changeset_hash ) if skip_tool_test: # We found a skip_tool_test record associated with the changeset_revision, so see if it has a valid repository_revision. - repository_revision = get_repository_metadata_by_id( trans.app, trans.security.encode_id( repository_metadata.id ) ) + repository_revision = get_repository_metadata_by_id( app, app.security.encode_id( repository_metadata.id ) ) if repository_revision: # The skip_tool_test record is associated with a valid repository_metadata record, so proceed. continue @@ -364,14 +393,14 @@ # require updating, so we won't break here, we'll continue to inspect the rest of the changelog up to the received # changeset_revision. skip_tool_test.repository_metadata_id = repository_metadata.id - trans.sa_session.add( skip_tool_test ) - trans.sa_session.flush() + sa_session.add( skip_tool_test ) + sa_session.flush() if changeset_hash == changeset_revision: # Proceed no further than the received changeset_revision. break return repository_metadata -def different_revision_defines_tip_only_repository_dependency( trans, rd_tup, repository_dependencies ): +def different_revision_defines_tip_only_repository_dependency( app, rd_tup, repository_dependencies ): """ Determine if the only difference between rd_tup and a dependency definition in the list of repository_dependencies is the changeset_revision value. @@ -386,8 +415,8 @@ cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) if cleaned_rd_tool_shed == cleaned_tool_shed and rd_name == name and rd_owner == owner: # Determine if the repository represented by the dependency tuple is an instance of the repository type TipOnly. 
- required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - repository_type_class = trans.app.repository_types_registry.get_class_by_label( required_repository.type ) + required_repository = suc.get_repository_by_name_and_owner( app, name, owner ) + repository_type_class = app.repository_types_registry.get_class_by_label( required_repository.type ) return isinstance( repository_type_class, TipOnly ) return False @@ -1045,20 +1074,17 @@ metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ] return metadata_dict -def get_latest_repository_metadata( trans, decoded_repository_id, downloadable=False ): +def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ): """Get last metadata defined for a specified repository from the database.""" - repository = trans.sa_session.query( trans.model.Repository ).get( decoded_repository_id ) - repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False ) + sa_session = app.model.context.current + repository = sa_session.query( app.model.Repository ).get( decoded_repository_id ) + repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False ) if downloadable: - changeset_revision = suc.get_latest_downloadable_changeset_revision( trans.app, - repository, - repo ) + changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo ) else: - changeset_revision = suc.get_latest_changeset_revision( trans.app, - repository, - repo ) - return suc.get_repository_metadata_by_changeset_revision( trans.app, - trans.security.encode_id( repository.id ), + changeset_revision = suc.get_latest_changeset_revision( app, repository, repo ) + return suc.get_repository_metadata_by_changeset_revision( app, + app.security.encode_id( repository.id ), changeset_revision ) def get_parent_id( app, id, old_id, version, guid, changeset_revisions ): @@ -1377,10 +1403,11 @@ return repository_metadata.malicious return False -def new_datatypes_metadata_required( trans, repository_metadata, metadata_dict ): +def new_datatypes_metadata_required( repository_metadata, metadata_dict ): """ - Compare the last saved metadata for each datatype in the repository with the new metadata in metadata_dict to determine if a new - repository_metadata table record is required or if the last saved metadata record can be updated for datatypes instead. + Compare the last saved metadata for each datatype in the repository with the new metadata + in metadata_dict to determine if a new repository_metadata table record is required or if + the last saved metadata record can be updated for datatypes instead. """ # Datatypes are stored in metadata as a list of dictionaries that looks like: # [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}] @@ -1392,7 +1419,7 @@ if 'datatypes' in metadata: ancestor_datatypes = metadata[ 'datatypes' ] # The saved metadata must be a subset of the new metadata. - datatype_comparison = compare_datatypes( trans, ancestor_datatypes, current_datatypes ) + datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) if datatype_comparison == NOT_EQUAL_AND_NOT_SUBSET: return True else: @@ -1409,23 +1436,28 @@ # The received metadata_dict includes no metadata for datatypes, so a new repository_metadata table record is not needed. 
return False -def new_metadata_required_for_utilities( trans, repository, new_tip_metadata_dict ): +def new_metadata_required_for_utilities( app, repository, new_tip_metadata_dict ): """ - Galaxy utilities currently consist of datatypes, repository_dependency definitions, tools, tool_dependency definitions and exported - Galaxy workflows. This method compares the last stored repository_metadata record associated with the received repository against the - contents of the received new_tip_metadata_dict and returns True or False for the union set of Galaxy utilities contained in both metadata - dictionaries. The metadata contained in new_tip_metadata_dict may not be a subset of that contained in the last stored repository_metadata - record associated with the received repository because one or more Galaxy utilities may have been deleted from the repository in the new tip. + Galaxy utilities currently consist of datatypes, repository_dependency definitions, + tools, tool_dependency definitions and exported Galaxy workflows. This method compares + the last stored repository_metadata record associated with the received repository against + the contents of the received new_tip_metadata_dict and returns True or False for the union + set of Galaxy utilities contained in both metadata dictionaries. The metadata contained + in new_tip_metadata_dict may not be a subset of that contained in the last stored + repository_metadata record associated with the received repository because one or more + Galaxy utilities may have been deleted from the repository in the new tip. """ - repository_metadata = get_latest_repository_metadata( trans, repository.id, downloadable=False ) - datatypes_required = new_datatypes_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) - # Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the - # compare_readme_files() method. - # readme_files_required = new_readme_files_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) - repository_dependencies_required = new_repository_dependency_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) - tools_required = new_tool_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) - tool_dependencies_required = new_tool_dependency_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) - workflows_required = new_workflow_metadata_required( trans, repository_metadata, new_tip_metadata_dict ) + repository_metadata = get_latest_repository_metadata( app, repository.id, downloadable=False ) + datatypes_required = new_datatypes_metadata_required( repository_metadata, new_tip_metadata_dict ) + # Uncomment the following if we decide that README files should affect how installable + # repository revisions are defined. See the NOTE in the compare_readme_files() method. 
+ # readme_files_required = new_readme_files_metadata_required( repository_metadata, new_tip_metadata_dict ) + repository_dependencies_required = new_repository_dependency_metadata_required( app, + repository_metadata, + new_tip_metadata_dict ) + tools_required = new_tool_metadata_required( repository_metadata, new_tip_metadata_dict ) + tool_dependencies_required = new_tool_dependency_metadata_required( repository_metadata, new_tip_metadata_dict ) + workflows_required = new_workflow_metadata_required( repository_metadata, new_tip_metadata_dict ) if datatypes_required or repository_dependencies_required or tools_required or tool_dependencies_required or workflows_required: return True return False @@ -1462,7 +1494,7 @@ # The received metadata_dict includes no metadata for readme_files, so a new repository_metadata table record is not needed. return False -def new_repository_dependency_metadata_required( trans, repository_metadata, metadata_dict ): +def new_repository_dependency_metadata_required( app, repository_metadata, metadata_dict ): """ Compare the last saved metadata for each repository dependency in the repository with the new metadata in metadata_dict to determine if a new repository_metadata table record is required or if the last saved metadata record can be updated for repository_dependencies instead. @@ -1474,24 +1506,29 @@ new_repository_dependencies_metadata = metadata_dict.get( 'repository_dependencies', None ) if new_repository_dependencies_metadata: new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ] - # TODO: We used to include the following here to handle the case where repository dependency definitions were deleted. However, - # this erroneously returned True in cases where it should not have done so. This usually occurred where multiple single files - # were uploaded when a single tarball should have been. We need to implement support for handling deleted repository dependency - # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following. + # TODO: We used to include the following here to handle the case where repository + # dependency definitions were deleted. However, this erroneously returned True in + # cases where it should not have done so. This usually occurred where multiple single + # files were uploaded when a single tarball should have been. We need to implement + # support for handling deleted repository dependency definitions so that we can guarantee + # reproducibility, but we need to do it in a way that is better than the following. # for new_repository_dependency in new_repository_dependencies: # if new_repository_dependency not in saved_repository_dependencies: # return True # The saved metadata must be a subset of the new metadata. for saved_repository_dependency in saved_repository_dependencies: if saved_repository_dependency not in new_repository_dependencies: - # In some cases, the only difference between a dependency definition in the lists is the changeset_revision value. We'll - # check to see if this is the case, and if the defined dependency is a repository that has metadata set only on its tip. - if not different_revision_defines_tip_only_repository_dependency( trans, saved_repository_dependency, new_repository_dependencies ): + # In some cases, the only difference between a dependency definition in the lists + # is the changeset_revision value.
We'll check to see if this is the case, and if + # the defined dependency is a repository that has metadata set only on its tip. + if not different_revision_defines_tip_only_repository_dependency( app, + saved_repository_dependency, + new_repository_dependencies ): return True return False else: - # The repository_dependencies.xml file must have been deleted, so create a new repository_metadata record so we always have - # access to the deleted file. + # The repository_dependencies.xml file must have been deleted, so create a new + # repository_metadata record so we always have access to the deleted file. return True else: return False @@ -1500,13 +1537,15 @@ # There is no saved repository metadata, so we need to create a new repository_metadata record. return True else: - # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata record is not needed. + # The received metadata_dict includes no metadata for repository dependencies, so + # a new repository_metadata record is not needed. return False -def new_tool_dependency_metadata_required( trans, repository_metadata, metadata_dict ): +def new_tool_dependency_metadata_required( repository_metadata, metadata_dict ): """ - Compare the last saved metadata for each tool dependency in the repository with the new metadata in metadata_dict to determine if a new - repository_metadata table record is required or if the last saved metadata record can be updated for tool_dependencies instead. + Compare the last saved metadata for each tool dependency in the repository with the new + metadata in metadata_dict to determine if a new repository_metadata table record is required + or if the last saved metadata record can be updated for tool_dependencies instead. """ if repository_metadata: metadata = repository_metadata.metadata @@ -1545,10 +1584,11 @@ # The received metadata_dict includes no metadata for tool dependencies, so a new repository_metadata record is not needed. return False -def new_tool_metadata_required( trans, repository_metadata, metadata_dict ): +def new_tool_metadata_required( repository_metadata, metadata_dict ): """ - Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata - table record is required, or if the last saved metadata record can be updated instead. + Compare the last saved metadata for each tool in the repository with the new metadata in + metadata_dict to determine if a new repository_metadata table record is required, or if + the last saved metadata record can be updated instead. """ if 'tools' in metadata_dict: if repository_metadata: @@ -1587,10 +1627,11 @@ # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed. return False -def new_workflow_metadata_required( trans, repository_metadata, metadata_dict ): +def new_workflow_metadata_required( repository_metadata, metadata_dict ): """ - Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of - exported workflows. If this changes at some future time, this method should be enhanced accordingly. + Currently everything about an exported workflow except the name is hard-coded, so + there's no real way to differentiate versions of exported workflows. If this changes + at some future time, this method should be enhanced accordingly. 
""" if 'workflows' in metadata_dict: if repository_metadata: @@ -1819,7 +1860,7 @@ # SUBSET - ancestor metadata is a subset of current metadata, so continue from current # NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current # metadata, so persist ancestor metadata. - comparison = compare_changeset_revisions( trans, + comparison = compare_changeset_revisions( trans.app, ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, @@ -1830,7 +1871,7 @@ elif comparison == NOT_EQUAL_AND_NOT_SUBSET: metadata_changeset_revision = ancestor_changeset_revision metadata_dict = ancestor_metadata_dict - repository_metadata = create_or_update_repository_metadata( trans, + repository_metadata = create_or_update_repository_metadata( trans.app, id, repository, metadata_changeset_revision, @@ -1846,7 +1887,7 @@ metadata_changeset_revision = current_changeset_revision metadata_dict = current_metadata_dict # We're at the end of the change log. - repository_metadata = create_or_update_repository_metadata( trans, + repository_metadata = create_or_update_repository_metadata( trans.app, id, repository, metadata_changeset_revision, @@ -1858,7 +1899,7 @@ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not. if not ctx.children(): # We're at the end of the change log. - repository_metadata = create_or_update_repository_metadata( trans, + repository_metadata = create_or_update_repository_metadata( trans.app, id, repository, metadata_changeset_revision, @@ -1901,7 +1942,7 @@ owner = str( repository.owner ) invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans.app, repository_id ) if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False ) + message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, None, as_html=False ) log.debug( message ) unsuccessful_count += 1 else: @@ -1947,20 +1988,21 @@ return False return True -def set_repository_metadata( trans, repository, content_alert_str='', **kwd ): +def set_repository_metadata( app, host, user, repository, content_alert_str='', **kwd ): """ - Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the - repository owner that the changeset has problems. + Set metadata using the repository's current disk files, returning specific error + messages (if any) to alert the repository owner that the changeset has problems. 
""" + sa_session = app.model.context.current message = '' status = 'done' - encoded_id = trans.security.encode_id( repository.id ) - repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository ) - repo_dir = repository.repo_path( trans.app ) - repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False ) - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + encoded_id = app.security.encode_id( repository.id ) + repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository ) + repo_dir = repository.repo_path( app ) + repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repo_dir, create=False ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=app, repository=repository, - changeset_revision=repository.tip( trans.app ), + changeset_revision=repository.tip( app ), repository_clone_url=repository_clone_url, relative_install_dir=repo_dir, repository_files_dir=None, @@ -1969,25 +2011,25 @@ persist=False ) if metadata_dict: repository_metadata = None - repository_type_class = trans.app.repository_types_registry.get_class_by_label( repository.type ) + repository_type_class = app.repository_types_registry.get_class_by_label( repository.type ) tip_only = isinstance( repository_type_class, TipOnly ) - if not tip_only and new_metadata_required_for_utilities( trans, repository, metadata_dict ): + if not tip_only and new_metadata_required_for_utilities( app, repository, metadata_dict ): # Create a new repository_metadata table row. - repository_metadata = create_or_update_repository_metadata( trans, + repository_metadata = create_or_update_repository_metadata( app, encoded_id, repository, - repository.tip( trans.app ), + repository.tip( app ), metadata_dict ) # If this is the first record stored for this repository, see if we need to send any email alerts. if len( repository.downloadable_revisions ) == 1: - suc.handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) + suc.handle_email_alerts( app, host, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) else: # Update the latest stored repository metadata with the contents and attributes of metadata_dict. - repository_metadata = get_latest_repository_metadata( trans, repository.id, downloadable=False ) + repository_metadata = get_latest_repository_metadata( app, repository.id, downloadable=False ) if repository_metadata: downloadable = is_downloadable( metadata_dict ) # Update the last saved repository_metadata table row. - repository_metadata.changeset_revision = repository.tip( trans.app ) + repository_metadata.changeset_revision = repository.tip( app ) repository_metadata.metadata = metadata_dict repository_metadata.downloadable = downloadable if 'datatypes' in metadata_dict: @@ -2018,14 +2060,14 @@ repository_metadata.tools_functionally_correct = False repository_metadata.missing_test_components = False repository_metadata.tool_test_results = None - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() + sa_session.add( repository_metadata ) + sa_session.flush() else: # There are no metadata records associated with the repository. 
- repository_metadata = create_or_update_repository_metadata( trans, + repository_metadata = create_or_update_repository_metadata( app, encoded_id, repository, - repository.tip( trans.app ), + repository.tip( app ), metadata_dict ) if 'tools' in metadata_dict and repository_metadata and status != 'error': # Set tool versions on the new downloadable change set. The order of the list of changesets is @@ -2033,41 +2075,32 @@ changeset_revisions = [] for changeset in repo.changelog: changeset_revision = str( repo.changectx( changeset ) ) - if suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_id, changeset_revision ): + if suc.get_repository_metadata_by_changeset_revision( app, encoded_id, changeset_revision ): changeset_revisions.append( changeset_revision ) - add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) + add_tool_versions( app, encoded_id, repository_metadata, changeset_revisions ) elif len( repo ) == 1 and not invalid_file_tups: - message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % str( repository.tip( trans.app ) ) + message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % str( repository.tip( app ) ) message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." status = "error" if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + message = tool_util.generate_message_for_invalid_tools( app, invalid_file_tups, repository, metadata_dict ) status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - tool_util.reset_tool_data_tables( trans.app ) + tool_util.reset_tool_data_tables( app ) return message, status -def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ): +def set_repository_metadata_due_to_new_tip( app, host, user, repository, content_alert_str=None, **kwd ): """Set metadata on the repository tip in the tool shed.""" # This method is not called from Galaxy. - error_message, status = set_repository_metadata( trans, - repository, - content_alert_str=content_alert_str, - **kwd ) - if error_message: - # FIXME: This probably should not redirect since this method is called from the upload controller as well - # as the repository controller. - # If there is an error, display it. - return trans.response.send_redirect( web.url_for( controller='repository', - action='manage_repository', - id=trans.security.encode_id( repository.id ), - message=error_message, - status='error' ) ) + error_message, status = set_repository_metadata( app, host, user, repository, content_alert_str=content_alert_str, **kwd ) + return status, error_message def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ): """ - Update an existing tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates - to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is:: + Update an existing tool dependency whose definition was updated in a change set + pulled by a Galaxy administrator when getting updates to an installed tool shed + repository.
The original_dependency_dict is a single tool dependency definition, + an example of which is:: {"name": "bwa", "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ", This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
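The sketches that follow are illustrative only: they restate the main patterns in this changeset as tiny self-contained Python programs, and every Fake*/stand-in name is an assumption, not Galaxy code. The unifying pattern is decoupling Tool Shed utilities from the web transaction object: functions now accept the application object (plus host and user where needed) and derive the SQLAlchemy session themselves via app.model.context.current instead of trans.sa_session.

    class FakeSession( object ):
        # Stand-in for the SQLAlchemy session reached via app.model.context.current.
        def add( self, obj ):
            print( 'staged: %r' % ( obj, ) )
        def flush( self ):
            print( 'flushed' )

    class FakeContext( object ):
        current = FakeSession()

    class FakeModel( object ):
        context = FakeContext()

    class FakeApp( object ):
        model = FakeModel()

    def save_record( app, record ):
        # Formerly trans.sa_session.add( record ); trans.sa_session.flush().
        sa_session = app.model.context.current
        sa_session.add( record )
        sa_session.flush()

    save_record( FakeApp(), { 'changeset_revision': 'abc123' } )

Because the utilities no longer reach into trans, the same function can be called from the web controllers, the API, or a script that has only an application object.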
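In the same spirit, get_repository_status_from_tool_shed() now receives user_is_admin as a plain boolean instead of calling trans.user_is_admin(). A sketch of the per-archive authorization triage, with security_agent reduced to a stand-in whose policy is assumed:

    class FakeSecurityAgent( object ):
        def user_can_import_repository_archive( self, user, owner ):
            # Assumed policy for illustration: users may import their own archives.
            return user == owner

    def status_for_archive( security_agent, user, user_is_admin, repository_info_dict ):
        if user_is_admin:
            return None  # admins may import anything
        if security_agent.user_can_import_repository_archive( user, repository_info_dict[ 'owner' ] ):
            return None
        return 'Not authorized to import'

    agent = FakeSecurityAgent()
    print( status_for_archive( agent, 'greg', False, { 'owner': 'greg' } ) )     # None
    print( status_for_archive( agent, 'greg', False, { 'owner': 'devteam' } ) )  # Not authorized to import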
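upload_capsule(), extract_capsule_files() and validate_capsule() now take only **kwd, threading one growing capsule_dict through the pipeline rather than consulting trans. A minimal sketch of that dict-threading style; the keys mirror the diff, but the function bodies here are invented:

    def extract_capsule_files( **kwd ):
        capsule_dict = dict( kwd )
        capsule_dict[ 'status' ] = 'ok'  # pretend extraction succeeded
        return capsule_dict

    def validate_capsule( **kwd ):
        capsule_dict = dict( kwd )
        if 'tar_archive' not in capsule_dict:
            capsule_dict[ 'status' ] = 'error'
        return capsule_dict

    capsule_dict = { 'tar_archive': object(), 'capsule_file_name': 'capsule.tar.gz' }
    capsule_dict = extract_capsule_files( **capsule_dict )
    capsule_dict = validate_capsule( **capsule_dict )
    print( capsule_dict[ 'status' ] )  # ok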
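The compare_* helpers in metadata_util share a three-way contract against the module constants EQUAL, SUBSET and NOT_EQUAL_AND_NOT_SUBSET, and dropping the unused trans argument is what lets several of them become pure functions. A toy version of the shared shape; the constant values below are assumptions for illustration:

    EQUAL = 'equal'
    SUBSET = 'subset'
    NOT_EQUAL_AND_NOT_SUBSET = 'not equal and not subset'

    def compare_flat_lists( ancestor, current ):
        # Mirrors the shape of compare_datatypes() and friends: every ancestor
        # item must still be present for the ancestor to be equal or a subset.
        if len( ancestor ) <= len( current ):
            for item in ancestor:
                if item not in current:
                    return NOT_EQUAL_AND_NOT_SUBSET
            if len( ancestor ) == len( current ):
                return EQUAL
            return SUBSET
        return NOT_EQUAL_AND_NOT_SUBSET

    assert compare_flat_lists( [ 'pdf' ], [ 'pdf' ] ) == EQUAL
    assert compare_flat_lists( [ 'pdf' ], [ 'pdf', 'txt' ] ) == SUBSET
    assert compare_flat_lists( [ 'png' ], [ 'pdf' ] ) == NOT_EQUAL_AND_NOT_SUBSET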
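different_revision_defines_tip_only_repository_dependency() underpins those subset checks: two dependency tuples that differ only in changeset_revision are treated as equivalent when the required repository's type is TipOnly. A simplified stand-in using four-element tuples; the real tuples carry additional flags, and the real check consults the repository_types_registry rather than a set:

    def differs_only_in_revision( rd_tup, repository_dependencies, tip_only_repositories ):
        toolshed, name, owner, changeset_revision = rd_tup
        for shed, dep_name, dep_owner, dep_rev in repository_dependencies:
            if shed == toolshed and dep_name == name and dep_owner == owner:
                # Same definition except (possibly) the revision; accept it only for
                # repositories whose type keeps metadata on the tip alone.
                return ( name, owner ) in tip_only_repositories
        return False

    deps = [ ( 'http://localhost:9009', 'emboss_datatypes', 'test', 'ab03a2a5f407' ) ]
    print( differs_only_in_revision( ( 'http://localhost:9009', 'emboss_datatypes', 'test', '0123456789ab' ),
                                     deps,
                                     { ( 'emboss_datatypes', 'test' ) } ) )  # True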
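The reset-metadata hunks walk the repository changelog and persist a repository_metadata record only when compare_changeset_revisions() reports NOT_EQUAL_AND_NOT_SUBSET; on EQUAL or SUBSET the walk simply continues from the current revision, and the revision reached at the end of the changelog is persisted as well. A sketch of that control flow, reusing the assumed constant value from above:

    NOT_EQUAL_AND_NOT_SUBSET = 'not equal and not subset'

    def walk_changelog( revisions, compare, persist ):
        ancestor = None
        for current in revisions:
            if ancestor is not None and compare( ancestor, current ) == NOT_EQUAL_AND_NOT_SUBSET:
                # Ancestor metadata is neither equal to nor a subset of the current
                # revision's metadata, so persist the ancestor.
                persist( ancestor )
            ancestor = current
        if ancestor is not None:
            persist( ancestor )  # we're at the end of the change log

    def persist( changeset_revision ):
        print( 'persisting metadata for %s' % changeset_revision )

    walk_changelog( [ 'r0', 'r1', 'r2' ], lambda a, c: NOT_EQUAL_AND_NOT_SUBSET, persist )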
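Finally, the FIXME about redirecting from set_repository_metadata_due_to_new_tip() is resolved by returning ( status, error_message ) and letting each caller react, as import_repository_archive() now does. A caller-side sketch with the metadata call stubbed out; the stub's return values are invented:

    def set_repository_metadata_due_to_new_tip( app, host, user, repository, content_alert_str=None ):
        # Stub standing in for metadata_util's real function.
        return 'error', 'invalid tool config; '

    def import_archive( app, host, user, repository ):
        results_dict = dict( ok=True, error_message='' )
        status, error_message = set_repository_metadata_due_to_new_tip( app, host, user, repository )
        if error_message:
            # No redirect: each caller decides how to surface the problem.
            results_dict[ 'ok' ] = False
            results_dict[ 'error_message' ] += error_message
        return results_dict

    print( import_archive( app=None, host='localhost', user=None, repository=None ) )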