1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/dd395d9b8a01/
changeset: dd395d9b8a01
user: greg
date: 2013-01-17 22:34:26
summary: Refactor the tool shed's common controller for planned elimination.
affected #: 7 files

diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,12 +1,14 @@
 import os, shutil, tempfile, logging, string, threading, urllib2, filecmp
-from galaxy import util
+from galaxy import web, util
 from galaxy.tools import parameters
 from galaxy.util import inflector, json
+from galaxy.util.odict import odict
 from galaxy.web import url_for
 from galaxy.web.form_builder import SelectField
 from galaxy.webapps.community.util import container_util
 from galaxy.datatypes import checkers
 from galaxy.model.orm import and_
+import sqlalchemy.orm.exc
 from galaxy.tools.parameters import dynamic_options
 from galaxy.tool_shed import encoding_util
@@ -38,6 +40,60 @@
 TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
 VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+new_repo_email_alert_template = """
+Repository name: ${repository_name}
+Revision: ${revision}
+Change description:
+${description}
+
+Uploaded by: ${username}
+Date content uploaded: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email when
+new repositories were created in the Galaxy tool shed named "${host}".
+-----------------------------------------------------------------------------
+"""
+
+email_alert_template = """
+Repository name: ${repository_name}
+Revision: ${revision}
+Change description:
+${description}
+
+Changed by: ${username}
+Date of change: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email whenever
+changes were made to the repository named "${repository_name}".
+-----------------------------------------------------------------------------
+"""
+
+contact_owner_template = """
+GALAXY TOOL SHED REPOSITORY MESSAGE
+------------------------
+
+The user '${username}' sent you the following message regarding your tool shed
+repository named '${repository_name}'. You can respond by sending a reply to
+the user's email address: ${email}.
+-----------------------------------------------------------------------------
+${message}
+-----------------------------------------------------------------------------
+This message was sent from the Galaxy Tool Shed instance hosted on the server
+'${host}'
+"""
+
 def add_installation_directories_to_tool_dependencies( trans, tool_dependencies ):
     """
     Determine the path to the installation directory for each of the received tool dependencies.
This path will be displayed within the tool dependencies @@ -65,6 +121,18 @@ requirements_dict[ 'install_dir' ] = install_dir tool_dependencies[ dependency_key ] = requirements_dict return tool_dependencies +def add_tool_versions( trans, id, repository_metadata, changeset_revisions ): + # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. + metadata = repository_metadata.metadata + tool_versions_dict = {} + for tool_dict in metadata.get( 'tools', [] ): + # We have at least 2 changeset revisions to compare tool guids and tool ids. + parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions ) + tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id + if tool_versions_dict: + repository_metadata.tool_versions = tool_versions_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() def build_readme_files_dict( metadata, tool_path=None ): """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata.""" readme_files_dict = {} @@ -439,6 +507,30 @@ except: pass return can_use_disk_file +def changeset_is_malicious( trans, id, changeset_revision, **kwd ): + """Check the malicious flag in repository metadata for a specified change set""" + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + return repository_metadata.malicious + return False +def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ): + """Determine if the current changeset revision has been reviewed by the current user.""" + for review in repository.reviews: + if review.changeset_revision == changeset_revision and review.user == user: + return True + return False +def check_file_contents( trans ): + """See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be + checked for inappropriate content. 
+ """ + admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) + for repository in trans.sa_session.query( trans.model.Repository ) \ + .filter( trans.model.Repository.table.c.email_alerts != None ): + email_alerts = json.from_json_string( repository.email_alerts ) + for user_email in email_alerts: + if user_email in admin_users: + return True + return False def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ): """ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make @@ -1411,13 +1503,49 @@ if name == stripped_file_name: return os.path.abspath( os.path.join( root, name ) ) return file_path +def get_categories( trans ): + """Get all categories from the database.""" + return trans.sa_session.query( trans.model.Category ) \ + .filter( trans.model.Category.table.c.deleted==False ) \ + .order_by( trans.model.Category.table.c.name ) \ + .all() +def get_category( trans, id ): + """Get a category from the database.""" + return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) ) +def get_category_by_name( trans, name ): + """Get a category from the database via name.""" + try: + return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one() + except sqlalchemy.orm.exc.NoResultFound: + return None def get_changectx_for_changeset( repo, changeset_revision, **kwd ): - """Retrieve a specified changectx from a repository""" + """Retrieve a specified changectx from a repository.""" for changeset in repo.changelog: ctx = repo.changectx( changeset ) if str( ctx ) == changeset_revision: return ctx return None +def get_component( trans, id ): + """Get a component from the database.""" + return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) ) +def get_component_by_name( trans, name ): + """Get a component from the database via a name.""" + return trans.sa_session.query( trans.app.model.Component ) \ + .filter( trans.app.model.Component.table.c.name==name ) \ + .first() +def get_component_review( trans, id ): + """Get a component_review from the database""" + return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) ) +def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ): + """Get a component_review from the database via repository_review_id and component_id.""" + return trans.sa_session.query( trans.model.ComponentReview ) \ + .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ), + trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \ + .first() +def get_components( trans ): + return trans.sa_session.query( trans.app.model.Component ) \ + .order_by( trans.app.model.Component.name ) \ + .all() def get_config_from_disk( config_file, relative_install_dir ): for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: @@ -1426,7 +1554,7 @@ return os.path.abspath( os.path.join( root, name ) ) return None def get_configured_ui(): - # Configure any desired ui settings. + """Configure any desired ui settings.""" _ui = ui.ui() # The following will suppress all messages. 
This is # the same as adding the following setting to the repo @@ -1479,6 +1607,12 @@ def get_installed_tool_shed_repository( trans, id ): """Get a repository on the Galaxy side from the database via id""" return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) ) +def get_latest_repository_metadata( trans, decoded_repository_id ): + """Get last metadata defined for a specified repository from the database.""" + return trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \ + .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ + .first() def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ): """ Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision. @@ -1649,6 +1783,21 @@ return INITIAL_CHANGELOG_HASH else: previous_changeset_revision = changeset_revision +def get_previous_repository_reviews( trans, repository, changeset_revision ): + """Return an ordered dictionary of repository reviews up to and including the received changeset revision.""" + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] + previous_reviews_dict = odict() + for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + previous_changeset_revision = str( repo.changectx( changeset ) ) + if previous_changeset_revision in reviewed_revision_hashes: + previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision ) + revision_reviews = get_reviews_by_repository_id_changeset_revision( trans, + trans.security.encode_id( repository.id ), + previous_changeset_revision ) + previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label, + reviews=revision_reviews ) + return previous_reviews_dict def get_readme_file_names( repository_name ): readme_files = [ 'readme', 'read_me', 'install' ] valid_filenames = [ r for r in readme_files ] @@ -1664,6 +1813,9 @@ elif len( repo_info_tuple ) == 7: description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies +def get_repository_by_name( trans, name ): + """Get a repository from the database via name.""" + return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() def get_repository_by_name_and_owner( trans, name, owner ): """Get a repository from the database via name and owner""" if trans.webapp.name == 'galaxy': @@ -1819,6 +1971,25 @@ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ), trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \ .first() +def get_repository_metadata_revisions_for_review( repository, reviewed=True ): + repository_metadata_revisions = [] + metadata_changeset_revision_hashes = [] + if reviewed: + for metadata_revision in repository.metadata_revisions: + metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision ) + for review in repository.reviews: + if review.changeset_revision in 
metadata_changeset_revision_hashes:
+                rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ]
+                if review.changeset_revision not in rmcr_hashes:
+                    repository_metadata_revisions.append( review.repository_metadata )
+    else:
+        for review in repository.reviews:
+            if review.changeset_revision not in metadata_changeset_revision_hashes:
+                metadata_changeset_revision_hashes.append( review.changeset_revision )
+        for metadata_revision in repository.metadata_revisions:
+            if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes:
+                repository_metadata_revisions.append( metadata_revision )
+    return repository_metadata_revisions
 def get_repository_tools_tups( app, metadata_dict ):
     repository_tools_tups = []
     index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
@@ -1849,6 +2020,48 @@
                 relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
             relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
     return relative_path_to_file
+def get_reversed_changelog_changesets( repo ):
+    reversed_changelog = []
+    for changeset in repo.changelog:
+        reversed_changelog.insert( 0, changeset )
+    return reversed_changelog
+def get_review( trans, id ):
+    """Get a repository_review from the database via id."""
+    return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
+def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
+    """Get all repository_reviews from the database via repository id and changeset_revision."""
+    return trans.sa_session.query( trans.model.RepositoryReview ) \
+                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+                                          trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
+                           .all()
+def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
+    """Get a repository_review from the database via repository id, changeset_revision and user_id."""
+    return trans.sa_session.query( trans.model.RepositoryReview ) \
+                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+                                          trans.model.RepositoryReview.changeset_revision == changeset_revision,
+                                          trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
+                           .first()
+def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
+    if repository is None:
+        repository = repository_metadata.repository
+    repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+    changeset_revision = repository_metadata.changeset_revision
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        rev = '%04d' % ctx.rev()
+        label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+    else:
+        rev = '-1'
+        label = "-1:%s" % changeset_revision
+    return rev, label, changeset_revision
+def get_revision_label( trans, repository, changeset_revision ):
+    """Return a string consisting of the human readable changeset rev and the changeset revision string."""
+    repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+    else:
+        return "-1:%s" % changeset_revision
 def get_sample_files_from_disk( repository_files_dir, tool_path=None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
     if resetting_all_metadata_on_repository:
         # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
@@ -1877,6 +2090,15 @@
             relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
         sample_file_metadata_paths.append( relative_path_to_sample_file )
     return sample_file_metadata_paths, sample_file_copy_paths
+def get_rev_label_from_changeset_revision( repo, changeset_revision ):
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        rev = '%04d' % ctx.rev()
+        label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+    else:
+        rev = '-1'
+        label = "-1:%s" % changeset_revision
+    return rev, label
 def get_shed_tool_conf_dict( app, shed_tool_conf ):
     """
     Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
@@ -2004,8 +2226,11 @@
         return shed_url
     # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
     return None
+def get_user( trans, id ):
+    """Get a user from the database by id."""
+    return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
 def get_user_by_username( trans, username ):
-    """Get a user from the database by username"""
+    """Get a user from the database by username."""
     return trans.sa_session.query( trans.model.User ) \
                            .filter( trans.model.User.table.c.username == username ) \
                            .one()
@@ -2044,6 +2269,94 @@
                                           all_repository_dependencies=all_repository_dependencies,
                                           handled_key_rd_dicts=handled_key_rd_dicts,
                                           circular_repository_dependencies=circular_repository_dependencies )
+def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
+    # There are 2 complementary features that enable a tool shed user to receive email notification:
+    # 1. Within User Preferences, they can elect to receive email when the first (or first valid)
+    #    change set is produced for a new repository.
+    # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
+    #    which causes them to receive email alerts when updates to the repository occur.  This same feature
+    #    is available on a per-repository basis on the repository grid within the tool shed.
+    #
+    # There are currently 4 scenarios for sending email notification when a change is made to a repository:
+    # 1. An admin user elects to receive email when the first change set is produced for a new repository
+    #    from User Preferences.  The change set does not have to include any valid content.  This allows for
+    #    the capture of inappropriate content being uploaded to new repositories.
+    # 2. A regular user elects to receive email when the first valid change set is produced for a new repository
+    #    from User Preferences.  This differs from 1 above in that the user will not receive email until a
+    #    change set that includes valid content is produced.
+    # 3. An admin user checks the "Receive email alerts" check box on the manage repository page.  Since the
+    #    user is an admin user, the email will include information about both HTML and image content that was
+    #    included in the change set.
+    # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. 
Since the + # user is not an admin user, the email will not include any information about both HTML and image content + # that was included in the change set. + repo_dir = repository.repo_path( trans.app ) + repo = hg.repository( get_configured_ui(), repo_dir ) + smtp_server = trans.app.config.smtp_server + if smtp_server and ( new_repo_alert or repository.email_alerts ): + # Send email alert to users that want them. + if trans.app.config.email_from is not None: + email_from = trans.app.config.email_from + elif trans.request.host.split( ':' )[0] == 'localhost': + email_from = 'galaxy-no-reply@' + socket.getfqdn() + else: + email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0] + tip_changeset = repo.changelog.tip() + ctx = repo.changectx( tip_changeset ) + t, tz = ctx.date() + date = datetime( *gmtime( float( t ) - tz )[:6] ) + display_date = date.strftime( "%Y-%m-%d" ) + try: + username = ctx.user().split()[0] + except: + username = ctx.user() + # We'll use 2 template bodies because we only want to send content + # alerts to tool shed admin users. + if new_repo_alert: + template = new_repo_email_alert_template + else: + template = email_alert_template + admin_body = string.Template( template ).safe_substitute( host=trans.request.host, + repository_name=repository.name, + revision='%s:%s' %( str( ctx.rev() ), ctx ), + display_date=display_date, + description=ctx.description(), + username=username, + content_alert_str=content_alert_str ) + body = string.Template( template ).safe_substitute( host=trans.request.host, + repository_name=repository.name, + revision='%s:%s' %( str( ctx.rev() ), ctx ), + display_date=display_date, + description=ctx.description(), + username=username, + content_alert_str='' ) + admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) + frm = email_from + if new_repo_alert: + subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name ) + subject = subject[ :80 ] + email_alerts = [] + for user in trans.sa_session.query( trans.model.User ) \ + .filter( and_( trans.model.User.table.c.deleted == False, + trans.model.User.table.c.new_repo_alert == True ) ): + if admin_only: + if user.email in admin_users: + email_alerts.append( user.email ) + else: + email_alerts.append( user.email ) + else: + subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name ) + email_alerts = json.from_json_string( repository.email_alerts ) + for email in email_alerts: + to = email.strip() + # Send it + try: + if to in admin_users: + util.send_mail( frm, to, subject, admin_body, trans.app.config ) + else: + util.send_mail( frm, to, subject, body, trans.app.config ) + except Exception, e: + log.exception( "An error occurred sending a tool shed repository update alert by email." 
) def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ): """ This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an @@ -2160,6 +2473,15 @@ message = str( e ) error = True return error, message +def has_previous_repository_reviews( trans, repository, changeset_revision ): + """Determine if a repository has a changeset revision review prior to the received changeset revision.""" + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] + for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + previous_changeset_revision = str( repo.changectx( changeset ) ) + if previous_changeset_revision in reviewed_revision_hashes: + return True + return False def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ): """Return True if { repository_key :repository_dependency } is in all_repository_dependencies.""" for key, val in all_repository_dependencies.items(): @@ -2348,6 +2670,89 @@ containers_dict[ 'tool_dependencies' ] = root_container containers_dict[ 'missing_tool_dependencies' ] = None return containers_dict +def new_repository_dependency_metadata_required( trans, repository, metadata_dict ): + """ + Compare the last saved metadata for each repository dependency in the repository with the new metadata in metadata_dict to determine if a new + repository_metadata table record is required or if the last saved metadata record can be updated instead. + """ + if 'repository_dependencies' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if 'repository_dependencies' in metadata: + saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] + new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ] + # The saved metadata must be a subset of the new metadata. + for new_repository_dependency_metadata in new_repository_dependencies: + if new_repository_dependency_metadata not in saved_repository_dependencies: + return True + for saved_repository_dependency_metadata in saved_repository_dependencies: + if saved_repository_dependency_metadata not in new_repository_dependencies: + return True + else: + # We have repository metadata that does not include metadata for any repository dependencies in the + # repository, so we can update the existing repository metadata. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed. + return False +def new_tool_metadata_required( trans, repository, metadata_dict ): + """ + Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata + table record is required, or if the last saved metadata record can be updated instead. 
+ """ + if 'tools' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if 'tools' in metadata: + saved_tool_ids = [] + # The metadata for one or more tools was successfully generated in the past + # for this repository, so we first compare the version string for each tool id + # in metadata_dict with what was previously saved to see if we need to create + # a new table record or if we can simply update the existing record. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + for saved_tool_metadata_dict in metadata[ 'tools' ]: + if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] ) + if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]: + if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]: + return True + # So far, a new metadata record is not required, but we still have to check to see if + # any new tool ids exist in metadata_dict that are not in the saved metadata. We do + # this because if a new tarball was uploaded to a repository that included tools, it + # may have removed existing tool files if they were not included in the uploaded tarball. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + return True + else: + # We have repository metadata that does not include metadata for any tools in the + # repository, so we can update the existing repository metadata. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed. + return False +def new_workflow_metadata_required( trans, repository, metadata_dict ): + """ + Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of + exported workflows. If this changes at some future time, this method should be enhanced accordingly. + """ + if 'workflows' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + # The repository has metadata, so update the workflows value - no new record is needed. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. + return False def open_repository_files_folder( trans, folder_path ): try: files_list = get_repository_files( trans, folder_path ) @@ -2701,6 +3106,77 @@ return reversed_changelog def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ): return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision ) +def set_repository_metadata( trans, repository, content_alert_str='', **kwd ): + """ + Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset + has problems. 
+ """ + message = '' + status = 'done' + encoded_id = trans.security.encode_id( repository.id ) + repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repo_dir = repository.repo_path( trans.app ) + repo = hg.repository( get_configured_ui(), repo_dir ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + relative_install_dir=repo_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False ) + if metadata_dict: + downloadable = is_downloadable( metadata_dict ) + repository_metadata = None + if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \ + new_tool_metadata_required( trans, repository, metadata_dict ) or \ + new_workflow_metadata_required( trans, repository, metadata_dict ): + # Create a new repository_metadata table row. + repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict ) + # If this is the first record stored for this repository, see if we need to send any email alerts. + if len( repository.downloadable_revisions ) == 1: + handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) + else: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + downloadable = is_downloadable( metadata_dict ) + # Update the last saved repository_metadata table row. + repository_metadata.changeset_revision = repository.tip( trans.app ) + repository_metadata.metadata = metadata_dict + repository_metadata.downloadable = downloadable + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + else: + # There are no tools in the repository, and we're setting metadata on the repository tip. + repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict ) + if 'tools' in metadata_dict and repository_metadata and status != 'error': + # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog. + changeset_revisions = [] + for changeset in repo.changelog: + changeset_revision = str( repo.changectx( changeset ) ) + if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ): + changeset_revisions.append( changeset_revision ) + add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) + elif len( repo ) == 1 and not invalid_file_tups: + message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) ) + message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." + status = "error" + if invalid_file_tups: + message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + status = 'error' + # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. + reset_tool_data_tables( trans.app ) + return message, status +def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ): + # Set metadata on the repository tip. 
+ error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd ) + if error_message: + # If there is an error, display it. + return trans.response.send_redirect( web.url_for( controller='repository', + action='manage_repository', + id=trans.security.encode_id( repository.id ), + message=error_message, + status='error' ) ) def strip_path( fpath ): if not fpath: return fpath @@ -2862,10 +3338,8 @@ # ? = not tracked # I = ignored # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that - # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py. - commands.update( get_configured_ui(), - repo, - rev=ctx_rev ) + # purging is not supported by the mercurial API. + commands.update( get_configured_ui(), repo, rev=ctx_rev ) def url_join( *args ): parts = [] for arg in args: diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/admin.py --- a/lib/galaxy/webapps/community/controllers/admin.py +++ b/lib/galaxy/webapps/community/controllers/admin.py @@ -6,7 +6,6 @@ from galaxy.web.form_builder import SelectField from galaxy.util import inflector import galaxy.util.shed_util_common as suc -import common from repository import RepositoryGrid, CategoryGrid from galaxy import eggs @@ -474,7 +473,7 @@ if k.startswith( 'f-' ): del kwd[ k ] if 'user_id' in kwd: - user = common.get_user( trans, kwd[ 'user_id' ] ) + user = suc.get_user( trans, kwd[ 'user_id' ] ) kwd[ 'f-email' ] = user.email del kwd[ 'user_id' ] else: @@ -489,7 +488,7 @@ if k.startswith( 'f-' ): del kwd[ k ] category_id = kwd.get( 'id', None ) - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) kwd[ 'f-Category.name' ] = category.name elif operation == "receive email alerts": if kwd[ 'id' ]: @@ -554,7 +553,7 @@ if not name or not description: message = 'Enter a valid name and a description' status = 'error' - elif common.get_category_by_name( trans, name ): + elif suc.get_category_by_name( trans, name ): message = 'A category with that name already exists' status = 'error' else: @@ -641,7 +640,7 @@ action='manage_categories', message=message, status='error' ) ) - category = common.get_category( trans, id ) + category = suc.get_category( trans, id ) if params.get( 'edit_category_button', False ): new_name = util.restore_text( params.get( 'name', '' ) ).strip() new_description = util.restore_text( params.get( 'description', '' ) ).strip() @@ -649,7 +648,7 @@ if not new_name: message = 'Enter a valid name' status = 'error' - elif category.name != new_name and common.get_category_by_name( trans, name ): + elif category.name != new_name and suc.get_category_by_name( trans, name ): message = 'A category with that name already exists' status = 'error' else: @@ -772,7 +771,7 @@ ids = util.listify( id ) message = "Deleted %d categories: " % len( ids ) for category_id in ids: - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) category.deleted = True trans.sa_session.add( category ) trans.sa_session.flush() @@ -800,7 +799,7 @@ purged_categories = "" message = "Purged %d categories: " % len( ids ) for category_id in ids: - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) if category.deleted: # Delete RepositoryCategoryAssociations for rca in 
category.repositories: @@ -827,7 +826,7 @@ count = 0 undeleted_categories = "" for category_id in ids: - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) if category.deleted: category.deleted = False trans.sa_session.add( category ) diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -1,13 +1,4 @@ -import os, string, socket, logging, simplejson, binascii, tempfile -from time import gmtime, strftime -from datetime import * -from galaxy.tools import * -from galaxy.util.odict import odict -from galaxy.util.json import from_json_string, to_json_string -import galaxy.util.shed_util_common as suc -from galaxy.web.base.controllers.admin import * -from galaxy.webapps.community import model -from galaxy.model.orm import and_ +import logging from galaxy.model.item_attrs import UsesItemRatings from galaxy import eggs @@ -16,63 +7,6 @@ log = logging.getLogger( __name__ ) -new_repo_email_alert_template = """ -Repository name: ${repository_name} -Revision: ${revision} -Change description: -${description} - -Uploaded by: ${username} -Date content uploaded: ${display_date} - -${content_alert_str} - ------------------------------------------------------------------------------ -This change alert was sent from the Galaxy tool shed hosted on the server -"${host}" ------------------------------------------------------------------------------ -You received this alert because you registered to receive email when -new repositories were created in the Galaxy tool shed named "${host}". ------------------------------------------------------------------------------ -""" - -email_alert_template = """ -Repository name: ${repository_name} -Revision: ${revision} -Change description: -${description} - -Changed by: ${username} -Date of change: ${display_date} - -${content_alert_str} - ------------------------------------------------------------------------------ -This change alert was sent from the Galaxy tool shed hosted on the server -"${host}" ------------------------------------------------------------------------------ -You received this alert because you registered to receive email whenever -changes were made to the repository named "${repository_name}". ------------------------------------------------------------------------------ -""" - -contact_owner_template = """ -GALAXY TOOL SHED REPOSITORY MESSAGE ------------------------- - -The user '${username}' sent you the following message regarding your tool shed -repository named '${repository_name}'. You can respond by sending a reply to -the user's email address: ${email}. ------------------------------------------------------------------------------ -${message} ------------------------------------------------------------------------------ -This message was sent from the Galaxy Tool Shed instance hosted on the server -'${host}' -""" - -malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content." -malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content." 
- class ItemRatings( UsesItemRatings ): """Overrides rate_item method since we also allow for comments""" def rate_item( self, trans, user, item, rating, comment='' ): @@ -95,503 +29,3 @@ trans.sa_session.add( item_rating ) trans.sa_session.flush() return item_rating - -def add_tool_versions( trans, id, repository_metadata, changeset_revisions ): - # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. - metadata = repository_metadata.metadata - tool_versions_dict = {} - for tool_dict in metadata.get( 'tools', [] ): - # We have at least 2 changeset revisions to compare tool guids and tool ids. - parent_id = suc.get_parent_id( trans, - id, - tool_dict[ 'id' ], - tool_dict[ 'version' ], - tool_dict[ 'guid' ], - changeset_revisions ) - tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id - if tool_versions_dict: - repository_metadata.tool_versions = tool_versions_dict - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() -def changeset_is_malicious( trans, id, changeset_revision, **kwd ): - """Check the malicious flag in repository metadata for a specified change set""" - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) - if repository_metadata: - return repository_metadata.malicious - return False -def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ): - """Determine if the current changeset revision has been reviewed by the current user.""" - for review in repository.reviews: - if review.changeset_revision == changeset_revision and review.user == user: - return True - return False -def check_file_contents( trans ): - # See if any admin users have chosen to receive email alerts when a repository is updated. - # If so, the file contents of the update must be checked for inappropriate content. 
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) - for repository in trans.sa_session.query( trans.model.Repository ) \ - .filter( trans.model.Repository.table.c.email_alerts != None ): - email_alerts = from_json_string( repository.email_alerts ) - for user_email in email_alerts: - if user_email in admin_users: - return True - return False -def get_category( trans, id ): - """Get a category from the database""" - return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) ) -def get_category_by_name( trans, name ): - """Get a category from the database via name""" - try: - return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one() - except sqlalchemy.orm.exc.NoResultFound: - return None -def get_categories( trans ): - """Get all categories from the database""" - return trans.sa_session.query( trans.model.Category ) \ - .filter( trans.model.Category.table.c.deleted==False ) \ - .order_by( trans.model.Category.table.c.name ) \ - .all() -def get_component( trans, id ): - """Get a component from the database""" - return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) ) -def get_component_by_name( trans, name ): - return trans.sa_session.query( trans.app.model.Component ) \ - .filter( trans.app.model.Component.table.c.name==name ) \ - .first() -def get_component_review( trans, id ): - """Get a component_review from the database""" - return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) ) -def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ): - """Get a component_review from the database via repository_review_id and component_id""" - return trans.sa_session.query( trans.model.ComponentReview ) \ - .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ), - trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \ - .first() -def get_components( trans ): - return trans.sa_session.query( trans.app.model.Component ) \ - .order_by( trans.app.model.Component.name ) \ - .all() -def get_latest_repository_metadata( trans, decoded_repository_id ): - """Get last metadata defined for a specified repository from the database""" - return trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \ - .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ - .first() -def get_previous_repository_reviews( trans, repository, changeset_revision ): - """Return an ordered dictionary of repository reviews up to and including the received changeset revision.""" - repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] - previous_reviews_dict = odict() - for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): - previous_changeset_revision = str( repo.changectx( changeset ) ) - if previous_changeset_revision in reviewed_revision_hashes: - previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision ) - revision_reviews = get_reviews_by_repository_id_changeset_revision( trans, - trans.security.encode_id( repository.id ), - previous_changeset_revision ) - previous_reviews_dict[ previous_changeset_revision ] = 
dict( changeset_revision_label=previous_changeset_revision_label, - reviews=revision_reviews ) - return previous_reviews_dict -def get_repository_by_name( trans, name ): - """Get a repository from the database via name""" - return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() -def get_repository_metadata_revisions_for_review( repository, reviewed=True ): - repository_metadata_revisions = [] - metadata_changeset_revision_hashes = [] - if reviewed: - for metadata_revision in repository.metadata_revisions: - metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision ) - for review in repository.reviews: - if review.changeset_revision in metadata_changeset_revision_hashes: - rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ] - if review.changeset_revision not in rmcr_hashes: - repository_metadata_revisions.append( review.repository_metadata ) - else: - for review in repository.reviews: - if review.changeset_revision not in metadata_changeset_revision_hashes: - metadata_changeset_revision_hashes.append( review.changeset_revision ) - for metadata_revision in repository.metadata_revisions: - if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes: - repository_metadata_revisions.append( metadata_revision ) - return repository_metadata_revisions -def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ): - if repository is None: - repository = repository_metadata.repository - repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - changeset_revision = repository_metadata.changeset_revision - ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) - if ctx: - rev = '%04d' % ctx.rev() - label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) - else: - rev = '-1' - label = "-1:%s" % changeset_revision - return rev, label, changeset_revision -def get_rev_label_from_changeset_revision( repo, changeset_revision ): - ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) - if ctx: - rev = '%04d' % ctx.rev() - label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) - else: - rev = '-1' - label = "-1:%s" % changeset_revision - return rev, label -def get_reversed_changelog_changesets( repo ): - reversed_changelog = [] - for changeset in repo.changelog: - reversed_changelog.insert( 0, changeset ) - return reversed_changelog -def get_review( trans, id ): - """Get a repository_review from the database via id""" - return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) ) -def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ): - """Get a repository_review from the database via repository id, changeset_revision and user_id""" - return trans.sa_session.query( trans.model.RepositoryReview ) \ - .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ), - trans.model.RepositoryReview.changeset_revision == changeset_revision, - trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \ - .first() -def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ): - """Get all repository_reviews from the database via repository id and changeset_revision""" - return trans.sa_session.query( trans.model.RepositoryReview ) \ - .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( 
repository_id ), - trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \ - .all() -def get_revision_label( trans, repository, changeset_revision ): - """ - Return a string consisting of the human read-able - changeset rev and the changeset revision string. - """ - repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) - if ctx: - return "%s:%s" % ( str( ctx.rev() ), changeset_revision ) - else: - return "-1:%s" % changeset_revision -def get_user( trans, id ): - """Get a user from the database by id""" - return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) ) -def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ): - # There are 2 complementary features that enable a tool shed user to receive email notification: - # 1. Within User Preferences, they can elect to receive email when the first (or first valid) - # change set is produced for a new repository. - # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts" - # which caused them to receive email alerts when updates to the repository occur. This same feature - # is available on a per-repository basis on the repository grid within the tool shed. - # - # There are currently 4 scenarios for sending email notification when a change is made to a repository: - # 1. An admin user elects to receive email when the first change set is produced for a new repository - # from User Preferences. The change set does not have to include any valid content. This allows for - # the capture of inappropriate content being uploaded to new repositories. - # 2. A regular user elects to receive email when the first valid change set is produced for a new repository - # from User Preferences. This differs from 1 above in that the user will not receive email until a - # change set tha tincludes valid content is produced. - # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the - # user is an admin user, the email will include information about both HTML and image content that was - # included in the change set. - # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the - # user is not an admin user, the email will not include any information about both HTML and image content - # that was included in the change set. - repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( suc.get_configured_ui(), repo_dir ) - smtp_server = trans.app.config.smtp_server - if smtp_server and ( new_repo_alert or repository.email_alerts ): - # Send email alert to users that want them. - if trans.app.config.email_from is not None: - email_from = trans.app.config.email_from - elif trans.request.host.split( ':' )[0] == 'localhost': - email_from = 'galaxy-no-reply@' + socket.getfqdn() - else: - email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0] - tip_changeset = repo.changelog.tip() - ctx = repo.changectx( tip_changeset ) - t, tz = ctx.date() - date = datetime( *gmtime( float( t ) - tz )[:6] ) - display_date = date.strftime( "%Y-%m-%d" ) - try: - username = ctx.user().split()[0] - except: - username = ctx.user() - # We'll use 2 template bodies because we only want to send content - # alerts to tool shed admin users. 
- if new_repo_alert: - template = new_repo_email_alert_template - else: - template = email_alert_template - admin_body = string.Template( template ).safe_substitute( host=trans.request.host, - repository_name=repository.name, - revision='%s:%s' %( str( ctx.rev() ), ctx ), - display_date=display_date, - description=ctx.description(), - username=username, - content_alert_str=content_alert_str ) - body = string.Template( template ).safe_substitute( host=trans.request.host, - repository_name=repository.name, - revision='%s:%s' %( str( ctx.rev() ), ctx ), - display_date=display_date, - description=ctx.description(), - username=username, - content_alert_str='' ) - admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) - frm = email_from - if new_repo_alert: - subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name ) - subject = subject[ :80 ] - email_alerts = [] - for user in trans.sa_session.query( trans.model.User ) \ - .filter( and_( trans.model.User.table.c.deleted == False, - trans.model.User.table.c.new_repo_alert == True ) ): - if admin_only: - if user.email in admin_users: - email_alerts.append( user.email ) - else: - email_alerts.append( user.email ) - else: - subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name ) - email_alerts = from_json_string( repository.email_alerts ) - for email in email_alerts: - to = email.strip() - # Send it - try: - if to in admin_users: - util.send_mail( frm, to, subject, admin_body, trans.app.config ) - else: - util.send_mail( frm, to, subject, body, trans.app.config ) - except Exception, e: - log.exception( "An error occurred sending a tool shed repository update alert by email." ) -def has_previous_repository_reviews( trans, repository, changeset_revision ): - """Determine if a repository has a changeset revision review prior to the received changeset revision.""" - repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] - for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ): - previous_changeset_revision = str( repo.changectx( changeset ) ) - if previous_changeset_revision in reviewed_revision_hashes: - return True - return False -def new_repository_dependency_metadata_required( trans, repository, metadata_dict ): - """ - Compare the last saved metadata for each repository dependency in the repository with the new - metadata in metadata_dict to determine if a new repository_metadata table record is required, - or if the last saved metadata record can be updated instead. - """ - if 'repository_dependencies' in metadata_dict: - repository_metadata = get_latest_repository_metadata( trans, repository.id ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if 'repository_dependencies' in metadata: - saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] - new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ] - # The saved metadata must be a subset of the new metadata. 
- for new_repository_dependency_metadata in new_repository_dependencies: - if new_repository_dependency_metadata not in saved_repository_dependencies: - return True - for saved_repository_dependency_metadata in saved_repository_dependencies: - if saved_repository_dependency_metadata not in new_repository_dependencies: - return True - else: - # We have repository metadata that does not include metadata for any repository dependencies in the - # repository, so we can update the existing repository metadata. - return False - else: - # There is no saved repository metadata, so we need to create a new repository_metadata table record. - return True - # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed. - return False -def new_tool_metadata_required( trans, repository, metadata_dict ): - """ - Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata - table record is required, or if the last saved metadata record can be updated instead. - """ - if 'tools' in metadata_dict: - repository_metadata = get_latest_repository_metadata( trans, repository.id ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if 'tools' in metadata: - saved_tool_ids = [] - # The metadata for one or more tools was successfully generated in the past - # for this repository, so we first compare the version string for each tool id - # in metadata_dict with what was previously saved to see if we need to create - # a new table record or if we can simply update the existing record. - for new_tool_metadata_dict in metadata_dict[ 'tools' ]: - for saved_tool_metadata_dict in metadata[ 'tools' ]: - if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids: - saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] ) - if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]: - if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]: - return True - # So far, a new metadata record is not required, but we still have to check to see if - # any new tool ids exist in metadata_dict that are not in the saved metadata. We do - # this because if a new tarball was uploaded to a repository that included tools, it - # may have removed existing tool files if they were not included in the uploaded tarball. - for new_tool_metadata_dict in metadata_dict[ 'tools' ]: - if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids: - return True - else: - # We have repository metadata that does not include metadata for any tools in the - # repository, so we can update the existing repository metadata. - return False - else: - # There is no saved repository metadata, so we need to create a new repository_metadata table record. - return True - # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed. - return False -def new_workflow_metadata_required( trans, repository, metadata_dict ): - """ - Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of - exported workflows. If this changes at some future time, this method should be enhanced accordingly. - """ - if 'workflows' in metadata_dict: - repository_metadata = get_latest_repository_metadata( trans, repository.id ) - if repository_metadata: - # The repository has metadata, so update the workflows value - no new record is needed. 
- return False - else: - # There is no saved repository metadata, so we need to create a new repository_metadata table record. - return True - # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. - return False -def set_repository_metadata( trans, repository, content_alert_str='', **kwd ): - """ - Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset - has problems. - """ - message = '' - status = 'done' - encoded_id = trans.security.encode_id( repository.id ) - repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository ) - repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( suc.get_configured_ui(), repo_dir ) - metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - relative_install_dir=repo_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False ) - if metadata_dict: - downloadable = suc.is_downloadable( metadata_dict ) - repository_metadata = None - if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \ - new_tool_metadata_required( trans, repository, metadata_dict ) or \ - new_workflow_metadata_required( trans, repository, metadata_dict ): - # Create a new repository_metadata table row. - repository_metadata = suc.create_or_update_repository_metadata( trans, - encoded_id, - repository, - repository.tip( trans.app ), - metadata_dict ) - # If this is the first record stored for this repository, see if we need to send any email alerts. - if len( repository.downloadable_revisions ) == 1: - handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) - else: - repository_metadata = get_latest_repository_metadata( trans, repository.id ) - if repository_metadata: - downloadable = suc.is_downloadable( metadata_dict ) - # Update the last saved repository_metadata table row. - repository_metadata.changeset_revision = repository.tip( trans.app ) - repository_metadata.metadata = metadata_dict - repository_metadata.downloadable = downloadable - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - else: - # There are no tools in the repository, and we're setting metadata on the repository tip. - repository_metadata = suc.create_or_update_repository_metadata( trans, - encoded_id, - repository, - repository.tip( trans.app ), - metadata_dict ) - if 'tools' in metadata_dict and repository_metadata and status != 'error': - # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog. - changeset_revisions = [] - for changeset in repo.changelog: - changeset_revision = str( repo.changectx( changeset ) ) - if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ): - changeset_revisions.append( changeset_revision ) - add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) - elif len( repo ) == 1 and not invalid_file_tups: - message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) ) - message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." 
- status = "error" - if invalid_file_tups: - message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) - status = 'error' - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - suc.reset_tool_data_tables( trans.app ) - return message, status -def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ): - # Set metadata on the repository tip. - error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd ) - if error_message: - # If there is an error, display it. - return trans.response.send_redirect( web.url_for( controller='repository', - action='manage_repository', - id=trans.security.encode_id( repository.id ), - message=error_message, - status='error' ) ) -def update_for_browsing( trans, repository, current_working_dir, commit_message='' ): - # This method id deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge - # is not supported by the mercurial API. - # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all - # added, modified or removed files that have not yet been committed. - repo_dir = repository.repo_path( trans.app ) - repo = hg.repository( suc.get_configured_ui(), repo_dir ) - # The following will delete the disk copy of only the files in the repository. - #os.system( 'hg update -r null > /dev/null 2>&1' ) - files_to_remove_from_disk = [] - files_to_commit = [] - # We may have files on disk in the repo directory that aren't being tracked, so they must be removed. - # The codes used to show the status of files are as follows. - # M = modified - # A = added - # R = removed - # C = clean - # ! = deleted, but still tracked - # ? = not tracked - # I = ignored - # We'll use mercurial's purge extension to remove untracked file. Using this extension requires the - # following entry in the repository's hgrc file which was not required for some time, so we'll add it - # if it's missing. - # [extensions] - # hgext.purge= - lines = repo.opener( 'hgrc', 'rb' ).readlines() - if not '[extensions]\n' in lines: - # No extensions have been added at all, so just append to the file. - fp = repo.opener( 'hgrc', 'a' ) - fp.write( '[extensions]\n' ) - fp.write( 'hgext.purge=\n' ) - fp.close() - elif not 'hgext.purge=\n' in lines: - # The file includes and [extensions] section, but we need to add the - # purge extension. 
- fp = repo.opener( 'hgrc', 'wb' ) - for line in lines: - if line.startswith( '[extensions]' ): - fp.write( line ) - fp.write( 'hgext.purge=\n' ) - else: - fp.write( line ) - fp.close() - cmd = 'hg purge' - os.chdir( repo_dir ) - proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - return_code = proc.wait() - os.chdir( current_working_dir ) - if return_code != 0: - output = proc.stdout.read( 32768 ) - log.debug( 'hg purge failed in repository directory %s, reason: %s' % ( repo_dir, output ) ) - if files_to_commit: - if not commit_message: - commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit ) - repo.dirstate.write() - repo.commit( user=trans.user.username, text=commit_message ) - cmd = 'hg update > /dev/null 2>&1' - os.chdir( repo_dir ) - proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - return_code = proc.wait() - os.chdir( current_working_dir ) - if return_code != 0: - output = proc.stdout.read( 32768 ) - log.debug( 'hg update > /dev/null 2>&1 failed in repository directory %s, reason: %s' % ( repo_dir, output ) ) diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/hg.py --- a/lib/galaxy/webapps/community/controllers/hg.py +++ b/lib/galaxy/webapps/community/controllers/hg.py @@ -1,7 +1,6 @@ import os, logging from galaxy.web.base.controller import * -from galaxy.util.shed_util_common import get_repository_by_name_and_owner -from galaxy.webapps.community.controllers.common import set_repository_metadata +from galaxy.util.shed_util_common import get_repository_by_name_and_owner, set_repository_metadata from galaxy import eggs eggs.require('mercurial') diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -25,6 +25,8 @@ log = logging.getLogger( __name__ ) VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" ) +malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content." +malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content." class CategoryGrid( grids.Grid ): class NameColumn( grids.TextColumn ): @@ -540,7 +542,7 @@ # The value of 'id' has been set to the search string, which is a repository name. We'll try to get the desired encoded repository # id to pass on. 
try: - repository = common.get_repository_by_name( trans, kwd[ 'id' ] ) + repository = suc.get_repository_by_name( trans, kwd[ 'id' ] ) kwd[ 'id' ] = trans.security.encode_id( repository.id ) except: pass @@ -615,7 +617,7 @@ if k.startswith( 'f-' ): del kwd[ k ] if 'user_id' in kwd: - user = common.get_user( trans, kwd[ 'user_id' ] ) + user = suc.get_user( trans, kwd[ 'user_id' ] ) kwd[ 'f-email' ] = user.email del kwd[ 'user_id' ] else: @@ -655,7 +657,7 @@ if k.startswith( 'f-' ): del kwd[ k ] category_id = kwd.get( 'id', None ) - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) kwd[ 'f-Category.name' ] = category.name elif operation == "receive email alerts": if trans.user: @@ -696,7 +698,7 @@ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) # Update repository files for browsing. suc.update_repository( repo ) - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, repository.tip( trans.app ) ) return trans.fill_template( '/webapps/community/repository/browse_repository.mako', repository=repository, @@ -722,7 +724,7 @@ # We'll try to get the desired encoded repository id to pass on. try: name = kwd[ 'id' ] - repository = common.get_repository_by_name( trans, name ) + repository = suc.get_repository_by_name( trans, name ) kwd[ 'id' ] = trans.security.encode_id( repository.id ) except: pass @@ -745,7 +747,7 @@ if 'f-Category.name' in kwd: # The user browsed to a category and then entered a search string, so get the category associated with it's value. category_name = kwd[ 'f-Category.name' ] - category = common.get_category_by_name( trans, category_name ) + category = suc.get_category_by_name( trans, category_name ) # Set the id value in kwd since it is required by the ValidRepositoryGrid.build_initial_query method. kwd[ 'id' ] = trans.security.encode_id( category.id ) if galaxy_url: @@ -755,7 +757,7 @@ if operation == "preview_tools_in_changeset": repository_id = kwd.get( 'id', None ) repository = suc.get_repository_in_tool_shed( trans, repository_id ) - repository_metadata = common.get_latest_repository_metadata( trans, repository.id ) + repository_metadata = suc.get_latest_repository_metadata( trans, repository.id ) latest_installable_changeset_revision = repository_metadata.changeset_revision return trans.response.send_redirect( web.url_for( controller='repository', action='preview_tools_in_changeset', @@ -767,7 +769,7 @@ if k.startswith( 'f-' ): del kwd[ k ] category_id = kwd.get( 'id', None ) - category = common.get_category( trans, category_id ) + category = suc.get_category( trans, category_id ) kwd[ 'f-Category.name' ] = category.name # The changeset_revision_select_field in the ValidRepositoryGrid performs a refresh_on_change which sends in request parameters like # changeset_revison_1, changeset_revision_2, etc. One of the many select fields on the grid performed the refresh_on_change, so we loop @@ -927,7 +929,7 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - categories = common.get_categories( trans ) + categories = suc.get_categories( trans ) if not categories: message = 'No categories have been configured in this instance of the Galaxy Tool Shed. 
' + \ 'An administrator needs to create some via the Administrator control panel before creating repositories.', @@ -1028,7 +1030,7 @@ if message: status = 'error' tool_state = self.__new_state( trans ) - is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, repository_id, changeset_revision ) try: return trans.fill_template( "/webapps/community/repository/tool_form.mako", @@ -1685,7 +1687,7 @@ status = params.get( 'status', 'error' ) repository, tool, error_message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config ) tool_state = self.__new_state( trans ) - is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) invalid_file_tups = [] if tool: invalid_file_tups = suc.check_tool_input_params( trans.app, @@ -1883,7 +1885,7 @@ selected_value=changeset_revision, add_id_to_name=False, downloadable=False ) - revision_label = common.get_revision_label( trans, repository, repository.tip( trans.app ) ) + revision_label = suc.get_revision_label( trans, repository, repository.tip( trans.app ) ) repository_metadata = None repository_metadata_id = None metadata = None @@ -1892,7 +1894,7 @@ if changeset_revision != suc.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: - revision_label = common.get_revision_label( trans, repository, changeset_revision ) + revision_label = suc.get_revision_label( trans, repository, changeset_revision ) repository_metadata_id = trans.security.encode_id( repository_metadata.id ) metadata = repository_metadata.metadata is_malicious = repository_metadata.malicious @@ -1902,7 +1904,7 @@ if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH: repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision ) if repository_metadata: - revision_label = common.get_revision_label( trans, repository, previous_changeset_revision ) + revision_label = suc.get_revision_label( trans, repository, previous_changeset_revision ) repository_metadata_id = trans.security.encode_id( repository_metadata.id ) metadata = repository_metadata.metadata is_malicious = repository_metadata.malicious @@ -1917,20 +1919,20 @@ handled_key_rd_dicts=None ) if is_malicious: if trans.app.security_agent.can_push( trans.app, trans.user, repository ): - message += common.malicious_error_can_push + message += malicious_error_can_push else: - message += common.malicious_error + message += malicious_error status = 'error' malicious_check_box = CheckboxField( 'malicious', checked=is_malicious ) - categories = common.get_categories( trans ) + categories = suc.get_categories( trans ) selected_categories = [ rca.category_id for rca in repository.categories ] # Determine if the current changeset revision has been reviewed by the current user. 
- reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) + reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) if reviewed_by_user: - review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans, - repository_id=id, - changeset_revision=changeset_revision, - user_id=trans.security.encode_id( trans.user.id ) ) + review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans, + repository_id=id, + changeset_revision=changeset_revision, + user_id=trans.security.encode_id( trans.user.id ) ) review_id = trans.security.encode_id( review.id ) else: review_id = None @@ -2031,7 +2033,7 @@ repository_metadata_id = None metadata = None repository_dependencies = None - revision_label = common.get_revision_label( trans, repository, changeset_revision ) + revision_label = suc.get_revision_label( trans, repository, changeset_revision ) changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, @@ -2103,7 +2105,7 @@ avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model ) display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) ) rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model ) - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, repository.tip( trans.app ) ) return trans.fill_template( '/webapps/community/repository/rate_repository.mako', repository=repository, @@ -2263,7 +2265,7 @@ if not commit_message: commit_message = 'Deleted selected files' commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message ) - common.handle_email_alerts( trans, repository ) + suc.handle_email_alerts( trans, repository ) # Update the repository files for browsing. suc.update_repository( repo ) # Get the new repository tip. @@ -2275,11 +2277,11 @@ else: message += 'The selected files were deleted from the repository. ' kwd[ 'message' ] = message - common.set_repository_metadata_due_to_new_tip( trans, repository, **kwd ) + suc.set_repository_metadata_due_to_new_tip( trans, repository, **kwd ) else: message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>." status = "error" - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) return trans.fill_template( '/webapps/community/repository/browse_repository.mako', repo=repo, repository=repository, @@ -2302,7 +2304,7 @@ # Get the name of the server hosting the tool shed instance. host = trans.request.host # Build the email message - body = string.Template( common.contact_owner_template ) \ + body = string.Template( suc.contact_owner_template ) \ .safe_substitute( username=trans.user.username, repository_name=repository.name, email=trans.user.email, @@ -2434,7 +2436,7 @@ 'has_metadata' : has_metadata } # Make sure we'll view latest changeset first. 
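The contact-owner body above is rendered with string.Template.safe_substitute, which, unlike substitute, leaves any ${...} placeholder it is not given in place instead of raising KeyError. A small illustration with a made-up template string, not the tool shed's actual template:

import string

template = "User '${username}' sent a message about your repository '${repository_name}': ${message}"
body = string.Template( template ).safe_substitute( username='some_user',
                                                    repository_name='example_repo' )
# ${message} was not supplied, so it is left untouched rather than triggering an error.
print( body )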
changesets.insert( 0, change_dict ) - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, repository.tip( trans.app ) ) return trans.fill_template( '/webapps/community/repository/view_changelog.mako', repository=repository, @@ -2465,7 +2467,7 @@ diffs = [] for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ): diffs.append( suc.to_safe_string( diff, to_html=True ) ) - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) metadata = self.get_metadata( trans, id, ctx_str ) return trans.fill_template( '/webapps/community/repository/view_changeset.mako', repository=repository, @@ -2535,7 +2537,7 @@ selected_value=changeset_revision, add_id_to_name=False, downloadable=False ) - revision_label = common.get_revision_label( trans, repository, changeset_revision ) + revision_label = suc.get_revision_label( trans, repository, changeset_revision ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: repository_metadata_id = trans.security.encode_id( repository_metadata.id ) @@ -2551,20 +2553,20 @@ else: repository_metadata_id = None metadata = None - is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) ) if is_malicious: if trans.app.security_agent.can_push( trans.app, trans.user, repository ): - message += common.malicious_error_can_push + message += malicious_error_can_push else: - message += common.malicious_error + message += malicious_error status = 'error' # Determine if the current changeset revision has been reviewed by the current user. 
- reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) + reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) if reviewed_by_user: - review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans, - repository_id=id, - changeset_revision=changeset_revision, - user_id=trans.security.encode_id( trans.user.id ) ) + review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans, + repository_id=id, + changeset_revision=changeset_revision, + user_id=trans.security.encode_id( trans.user.id ) ) review_id = trans.security.encode_id( review.id ) else: review_id = None @@ -2601,7 +2603,7 @@ tool = None guid = None original_tool_data_path = trans.app.config.tool_data_path - revision_label = common.get_revision_label( trans, repository, changeset_revision ) + revision_label = suc.get_revision_label( trans, repository, changeset_revision ) repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: metadata = repository_metadata.metadata @@ -2636,19 +2638,19 @@ tool_lineage = self.get_versions_of_tool( trans, repository, repository_metadata, guid ) else: metadata = None - is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) + is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) ) changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, add_id_to_name=False, downloadable=False ) trans.app.config.tool_data_path = original_tool_data_path - reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) + reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision ) if reviewed_by_user: - review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans, - repository_id=repository_id, - changeset_revision=changeset_revision, - user_id=trans.security.encode_id( trans.user.id ) ) + review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans, + repository_id=repository_id, + changeset_revision=changeset_revision, + user_id=trans.security.encode_id( trans.user.id ) ) review_id = trans.security.encode_id( review.id ) else: review_id = None @@ -2719,7 +2721,7 @@ # Restrict the options to all revisions that have associated metadata. repository_metadata_revisions = repository.metadata_revisions for repository_metadata in repository_metadata_revisions: - rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository ) + rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository ) changeset_tups.append( ( rev, label, changeset_revision ) ) refresh_on_change_values.append( changeset_revision ) # Sort options by the revision label. 
Even though the downloadable_revisions query sorts by update_time, diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/repository_review.py --- a/lib/galaxy/webapps/community/controllers/repository_review.py +++ b/lib/galaxy/webapps/community/controllers/repository_review.py @@ -59,7 +59,7 @@ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) for review in repository.reviews: changeset_revision = review.changeset_revision - rev, label = common.get_rev_label_from_changeset_revision( repo, changeset_revision ) + rev, label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision ) rval += '<a href="manage_repository_reviews_of_revision' rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label ) return rval @@ -67,13 +67,13 @@ class WithoutReviewsRevisionColumn( grids.GridColumn ): def get_value( self, trans, grid, repository ): # Restrict the options to revisions that have not yet been reviewed. - repository_metadata_revisions = common.get_repository_metadata_revisions_for_review( repository, reviewed=False ) + repository_metadata_revisions = suc.get_repository_metadata_revisions_for_review( repository, reviewed=False ) if repository_metadata_revisions: rval = '' for repository_metadata in repository_metadata_revisions: - rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans, - repository_metadata, - repository=repository ) + rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans, + repository_metadata, + repository=repository ) rval += '<a href="manage_repository_reviews_of_revision' rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label ) return rval @@ -177,7 +177,7 @@ rval += 'edit_review' else: rval +='browse_review' - rval += '?id=%s">%s</a>' % ( encoded_review_id, common.get_revision_label( trans, review.repository, review.changeset_revision ) ) + rval += '?id=%s">%s</a>' % ( encoded_review_id, suc.get_revision_label( trans, review.repository, review.changeset_revision ) ) return rval class RatingColumn( grids.TextColumn ): def get_value( self, trans, grid, review ): @@ -277,7 +277,7 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) encoded_review_id = kwd[ 'id' ] - review = common.get_review( trans, encoded_review_id ) + review = suc.get_review( trans, encoded_review_id ) if kwd.get( 'approve_repository_review_button', False ): approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP ) approved_select_field_value = str( kwd[ approved_select_field_name ] ) @@ -309,10 +309,10 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - review = common.get_review( trans, kwd[ 'id' ] ) + review = suc.get_review( trans, kwd[ 'id' ] ) repository = review.repository repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision ) + rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/browse_review.mako', repository=repository, 
changeset_revision_label=changeset_revision_label, @@ -345,7 +345,7 @@ if not name or not description: message = 'Enter a valid name and a description' status = 'error' - elif common.get_component_by_name( trans, name ): + elif suc.get_component_by_name( trans, name ): message = 'A component with that name already exists' status = 'error' else: @@ -378,15 +378,15 @@ if changeset_revision: # Make sure there is not already a review of the revision by the user. repository = suc.get_repository_in_tool_shed( trans, repository_id ) - if common.get_review_by_repository_id_changeset_revision_user_id( trans=trans, - repository_id=repository_id, - changeset_revision=changeset_revision, - user_id=trans.security.encode_id( trans.user.id ) ): + if suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans, + repository_id=repository_id, + changeset_revision=changeset_revision, + user_id=trans.security.encode_id( trans.user.id ) ): message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name ) status = "error" else: # See if there are any reviews for previous changeset revisions that the user can copy. - if not create_without_copying and not previous_review_id and common.has_previous_repository_reviews( trans, repository, changeset_revision ): + if not create_without_copying and not previous_review_id and suc.has_previous_repository_reviews( trans, repository, changeset_revision ): return trans.response.send_redirect( web.url_for( controller='repository_review', action='select_previous_review', **kwd ) ) @@ -404,7 +404,7 @@ trans.sa_session.add( review ) trans.sa_session.flush() if previous_review_id: - review_to_copy = common.get_review( trans, previous_review_id ) + review_to_copy = suc.get_review( trans, previous_review_id ) self.copy_review( trans, review_to_copy, review ) review_id = trans.security.encode_id( review.id ) message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \ @@ -440,7 +440,7 @@ action='manage_categories', message=message, status='error' ) ) - component = common.get_component( trans, id ) + component = suc.get_component( trans, id ) if params.get( 'edit_component_button', False ): new_description = util.restore_text( params.get( 'description', '' ) ).strip() if component.description != new_description: @@ -465,9 +465,9 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) review_id = kwd.get( 'id', None ) - review = common.get_review( trans, review_id ) + review = suc.get_review( trans, review_id ) components_dict = odict() - for component in common.get_components( trans ): + for component in suc.get_components( trans ): components_dict[ component.name ] = dict( component=component, component_review=None ) repository = review.repository repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) @@ -517,8 +517,8 @@ approved = str( v ) elif component_review_attr == 'rating': rating = int( str( v ) ) - component = common.get_component( trans, component_id ) - component_review = common.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id ) + component = suc.get_component( trans, component_id ) + component_review = suc.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id ) if component_review: # See if the existing component review should be updated. 
if component_review.comment != comment or \ @@ -572,7 +572,7 @@ name='revision_approved', selected_value=selected_value, for_component=False ) - rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision ) + rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/edit_review.mako', repository=repository, review=review, @@ -659,14 +659,14 @@ metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] reviews_dict = odict() - for changeset in common.get_reversed_changelog_changesets( repo ): + for changeset in suc.get_reversed_changelog_changesets( repo ): ctx = repo.changectx( changeset ) changeset_revision = str( ctx ) if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes: - rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision ) + rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision ) if changeset_revision in reviewed_revision_hashes: # Find the review for this changeset_revision - repository_reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) + repository_reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) # Determine if the current user can add a review to this revision. can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ] repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ) @@ -704,8 +704,8 @@ repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ] - rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision ) - reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) + rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision ) + reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/reviews_of_changeset_revision.mako', repository=repository, changeset_revision=changeset_revision, @@ -724,7 +724,7 @@ if 'operation' in kwd: operation = kwd['operation'].lower() # The value of the received id is the encoded review id. - review = common.get_review( trans, kwd[ 'id' ] ) + review = suc.get_review( trans, kwd[ 'id' ] ) repository = review.repository kwd[ 'id' ] = trans.security.encode_id( repository.id ) if operation == "inspect repository revisions": @@ -737,7 +737,7 @@ action='view_or_manage_repository', **kwd ) ) # The user may not be the current user. The value of the received id is the encoded user id. 
- user = common.get_user( trans, kwd[ 'id' ] ) + user = suc.get_user( trans, kwd[ 'id' ] ) self.repository_reviews_by_user_grid.title = "All repository revision reviews for user '%s'" % user.username return self.repository_reviews_by_user_grid( trans, **kwd ) @web.expose @@ -768,8 +768,8 @@ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] ) changeset_revision = kwd.get( 'changeset_revision', None ) repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) ) - previous_reviews_dict = common.get_previous_repository_reviews( trans, repository, changeset_revision ) - rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision ) + previous_reviews_dict = suc.get_previous_repository_reviews( trans, repository, changeset_revision ) + rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision ) return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako', repository=repository, changeset_revision=changeset_revision, diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/upload.py --- a/lib/galaxy/webapps/community/controllers/upload.py +++ b/lib/galaxy/webapps/community/controllers/upload.py @@ -1,7 +1,6 @@ import sys, os, shutil, logging, tarfile, tempfile, urllib from galaxy.web.base.controller import * from galaxy.datatypes import checkers -import common import galaxy.util.shed_util_common as suc from galaxy import eggs @@ -23,7 +22,7 @@ status = params.get( 'status', 'done' ) commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) ) category_ids = util.listify( params.get( 'category_id', '' ) ) - categories = common.get_categories( trans ) + categories = suc.get_categories( trans ) repository_id = params.get( 'repository_id', '' ) repository = suc.get_repository_in_tool_shed( trans, repository_id ) repo_dir = repository.repo_path( trans.app ) @@ -37,7 +36,7 @@ url = params.get( 'url', '' ) # Part of the upload process is sending email notification to those that have registered to # receive them. One scenario occurs when the first change set is produced for the repository. - # See the common.handle_email_alerts() method for the definition of the scenarios. + # See the suc.handle_email_alerts() method for the definition of the scenarios. 
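Those scenarios boil down to choosing between two audiences: users who registered for new-repository alerts (narrowed to admin addresses when admin_only is set) and, for ordinary updates, the addresses stored in the repository's JSON-encoded email_alerts list. A simplified standalone sketch of that selection; the stand-in objects mirror the attribute names used above but are hypothetical, and stdlib json is used in place of the Galaxy JSON helpers.

import json

def select_alert_recipients( repository, users, admin_users, new_repo_alert, admin_only ):
    # users: objects exposing .email, .deleted and .new_repo_alert, standing in for the User model.
    # repository.email_alerts: JSON-encoded list of addresses registered for update alerts.
    if new_repo_alert:
        recipients = [ user.email for user in users
                       if not user.deleted and user.new_repo_alert
                       and ( not admin_only or user.email in admin_users ) ]
    else:
        recipients = json.loads( repository.email_alerts or '[]' )
    return [ email.strip() for email in recipients ]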
new_repo_alert = repository.is_new( trans.app ) uploaded_directory = None if params.get( 'upload_button', False ): @@ -104,22 +103,11 @@ # Uploaded directory istar = False if istar: - ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_tar( trans, - repository, - tar, - uploaded_file, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert ) + ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \ + self.upload_tar( trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ) elif uploaded_directory: - ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_directory( trans, - repository, - uploaded_directory, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert ) + ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \ + self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ) else: if ( isgzip or isbz2 ) and uncompress_file: uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ) @@ -131,7 +119,7 @@ shutil.move( uploaded_file_name, full_path ) # See if any admin users have chosen to receive email alerts when a repository is # updated. If so, check every uploaded file to ensure content is appropriate. - check_contents = common.check_file_contents( trans ) + check_contents = suc.check_file_contents( trans ) if check_contents and os.path.isfile( full_path ): content_alert_str = self.__check_file_content( full_path ) else: @@ -148,7 +136,7 @@ message = '%s<br/>%s' % ( message, error_message ) # See if the content of the change set was valid. admin_only = len( repository.downloadable_revisions ) != 1 - common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) + suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) if ok: # Update the repository files for browsing. suc.update_repository( repo ) @@ -177,17 +165,20 @@ else: message += " %d files were removed from the repository root. " % len( files_to_remove ) kwd[ 'message' ] = message - common.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd ) - # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch + suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd ) + # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch + # or some other problem. if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ): if repository.metadata_revisions: + # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload. metadata_dict = repository.metadata_revisions[0].metadata else: metadata_dict = {} if 'tool_dependencies' not in metadata_dict: - message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file". 
' + message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", ' + message += 'so the tool dependency definitions will be ignored.' status = 'warning' - log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) ) + log.debug( 'Error in tool dependencies for repository with id %s and name %s: %s' % ( str( repository.id ), str( repository.name ), message ) ) # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. suc.reset_tool_data_tables( trans.app ) trans.response.send_redirect( web.url_for( controller='repository', @@ -327,7 +318,7 @@ pass # See if any admin users have chosen to receive email alerts when a repository is # updated. If so, check every uploaded file to ensure content is appropriate. - check_contents = common.check_file_contents( trans ) + check_contents = suc.check_file_contents( trans ) for filename_in_archive in filenames_in_archive: # Check file content to ensure it is appropriate. if check_contents and os.path.isfile( filename_in_archive ): @@ -341,7 +332,7 @@ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) admin_only = len( repository.downloadable_revisions ) != 1 - common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) + suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only ) return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ): if isgzip: Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.