galaxy-commits
Threads by month
- ----- 2026 -----
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
commit/galaxy-central: greg: Refactor the tool shed's common controller for planned elimination.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dd395d9b8a01/
changeset: dd395d9b8a01
user: greg
date: 2013-01-17 22:34:26
summary: Refactor the tool shed's common controller for planned elimination.
affected #: 7 files
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,12 +1,14 @@
import os, shutil, tempfile, logging, string, threading, urllib2, filecmp
-from galaxy import util
+from galaxy import web, util
from galaxy.tools import parameters
from galaxy.util import inflector, json
+from galaxy.util.odict import odict
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.webapps.community.util import container_util
from galaxy.datatypes import checkers
from galaxy.model.orm import and_
+import sqlalchemy.orm.exc
from galaxy.tools.parameters import dynamic_options
from galaxy.tool_shed import encoding_util
@@ -38,6 +40,60 @@
TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+new_repo_email_alert_template = """
+Repository name: ${repository_name}
+Revision: ${revision}
+Change description:
+${description}
+
+Uploaded by: ${username}
+Date content uploaded: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email when
+new repositories were created in the Galaxy tool shed named "${host}".
+-----------------------------------------------------------------------------
+"""
+
+email_alert_template = """
+Repository name: ${repository_name}
+Revision: ${revision}
+Change description:
+${description}
+
+Changed by: ${username}
+Date of change: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email whenever
+changes were made to the repository named "${repository_name}".
+-----------------------------------------------------------------------------
+"""
+
+contact_owner_template = """
+GALAXY TOOL SHED REPOSITORY MESSAGE
+------------------------
+
+The user '${username}' sent you the following message regarding your tool shed
+repository named '${repository_name}'. You can respond by sending a reply to
+the user's email address: ${email}.
+-----------------------------------------------------------------------------
+${message}
+-----------------------------------------------------------------------------
+This message was sent from the Galaxy Tool Shed instance hosted on the server
+'${host}'
+"""
+
def add_installation_directories_to_tool_dependencies( trans, tool_dependencies ):
"""
Determine the path to the installation directory for each of the received tool dependencies. This path will be displayed within the tool dependencies
@@ -65,6 +121,18 @@
requirements_dict[ 'install_dir' ] = install_dir
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
+def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
+ # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
+ metadata = repository_metadata.metadata
+ tool_versions_dict = {}
+ for tool_dict in metadata.get( 'tools', [] ):
+ # We have at least 2 changeset revisions to compare tool guids and tool ids.
+ parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
def build_readme_files_dict( metadata, tool_path=None ):
"""Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
readme_files_dict = {}
@@ -439,6 +507,30 @@
except:
pass
return can_use_disk_file
+def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
+ """Check the malicious flag in repository metadata for a specified change set"""
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ return repository_metadata.malicious
+ return False
+def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
+ """Determine if the current changeset revision has been reviewed by the current user."""
+ for review in repository.reviews:
+ if review.changeset_revision == changeset_revision and review.user == user:
+ return True
+ return False
+def check_file_contents( trans ):
+ """See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
+ checked for inappropriate content.
+ """
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.email_alerts != None ):
+ email_alerts = json.from_json_string( repository.email_alerts )
+ for user_email in email_alerts:
+ if user_email in admin_users:
+ return True
+ return False
def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
@@ -1411,13 +1503,49 @@
if name == stripped_file_name:
return os.path.abspath( os.path.join( root, name ) )
return file_path
+def get_categories( trans ):
+ """Get all categories from the database."""
+ return trans.sa_session.query( trans.model.Category ) \
+ .filter( trans.model.Category.table.c.deleted==False ) \
+ .order_by( trans.model.Category.table.c.name ) \
+ .all()
+def get_category( trans, id ):
+ """Get a category from the database."""
+ return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
+def get_category_by_name( trans, name ):
+ """Get a category from the database via name."""
+ try:
+ return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one()
+ except sqlalchemy.orm.exc.NoResultFound:
+ return None
def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
- """Retrieve a specified changectx from a repository"""
+ """Retrieve a specified changectx from a repository."""
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
if str( ctx ) == changeset_revision:
return ctx
return None
+def get_component( trans, id ):
+ """Get a component from the database."""
+ return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) )
+def get_component_by_name( trans, name ):
+ """Get a component from the database via a name."""
+ return trans.sa_session.query( trans.app.model.Component ) \
+ .filter( trans.app.model.Component.table.c.name==name ) \
+ .first()
+def get_component_review( trans, id ):
+ """Get a component_review from the database"""
+ return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) )
+def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ):
+ """Get a component_review from the database via repository_review_id and component_id."""
+ return trans.sa_session.query( trans.model.ComponentReview ) \
+ .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
+ trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
+ .first()
+def get_components( trans ):
+ return trans.sa_session.query( trans.app.model.Component ) \
+ .order_by( trans.app.model.Component.name ) \
+ .all()
def get_config_from_disk( config_file, relative_install_dir ):
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
@@ -1426,7 +1554,7 @@
return os.path.abspath( os.path.join( root, name ) )
return None
def get_configured_ui():
- # Configure any desired ui settings.
+ """Configure any desired ui settings."""
_ui = ui.ui()
# The following will suppress all messages. This is
# the same as adding the following setting to the repo
@@ -1479,6 +1607,12 @@
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+def get_latest_repository_metadata( trans, decoded_repository_id ):
+ """Get last metadata defined for a specified repository from the database."""
+ return trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
+ .first()
def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ):
"""
Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision.
@@ -1649,6 +1783,21 @@
return INITIAL_CHANGELOG_HASH
else:
previous_changeset_revision = changeset_revision
+def get_previous_repository_reviews( trans, repository, changeset_revision ):
+ """Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
+ repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+ previous_reviews_dict = odict()
+ for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ previous_changeset_revision = str( repo.changectx( changeset ) )
+ if previous_changeset_revision in reviewed_revision_hashes:
+ previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
+ revision_reviews = get_reviews_by_repository_id_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ previous_changeset_revision )
+ previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
+ reviews=revision_reviews )
+ return previous_reviews_dict
def get_readme_file_names( repository_name ):
readme_files = [ 'readme', 'read_me', 'install' ]
valid_filenames = [ r for r in readme_files ]
@@ -1664,6 +1813,9 @@
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
+def get_repository_by_name( trans, name ):
+ """Get a repository from the database via name."""
+ return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
def get_repository_by_name_and_owner( trans, name, owner ):
"""Get a repository from the database via name and owner"""
if trans.webapp.name == 'galaxy':
@@ -1819,6 +1971,25 @@
.filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
.first()
+def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
+ repository_metadata_revisions = []
+ metadata_changeset_revision_hashes = []
+ if reviewed:
+ for metadata_revision in repository.metadata_revisions:
+ metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision )
+ for review in repository.reviews:
+ if review.changeset_revision in metadata_changeset_revision_hashes:
+ rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ]
+ if review.changeset_revision not in rmcr_hashes:
+ repository_metadata_revisions.append( review.repository_metadata )
+ else:
+ for review in repository.reviews:
+ if review.changeset_revision not in metadata_changeset_revision_hashes:
+ metadata_changeset_revision_hashes.append( review.changeset_revision )
+ for metadata_revision in repository.metadata_revisions:
+ if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes:
+ repository_metadata_revisions.append( metadata_revision )
+ return repository_metadata_revisions
def get_repository_tools_tups( app, metadata_dict ):
repository_tools_tups = []
index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
@@ -1849,6 +2020,48 @@
relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
return relative_path_to_file
+def get_reversed_changelog_changesets( repo ):
+ reversed_changelog = []
+ for changeset in repo.changelog:
+ reversed_changelog.insert( 0, changeset )
+ return reversed_changelog
+def get_review( trans, id ):
+ """Get a repository_review from the database via id."""
+ return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
+def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
+ """Get all repository_reviews from the database via repository id and changeset_revision."""
+ return trans.sa_session.query( trans.model.RepositoryReview ) \
+ .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+ trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
+ .all()
+def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
+ """Get a repository_review from the database via repository id, changeset_revision and user_id."""
+ return trans.sa_session.query( trans.model.RepositoryReview ) \
+ .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+ trans.model.RepositoryReview.changeset_revision == changeset_revision,
+ trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
+ .first()
+def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
+ if repository is None:
+ repository = repository_metadata.repository
+ repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ changeset_revision = repository_metadata.changeset_revision
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ rev = '%04d' % ctx.rev()
+ label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+ else:
+ rev = '-1'
+ label = "-1:%s" % changeset_revision
+ return rev, label, changeset_revision
+def get_revision_label( trans, repository, changeset_revision ):
+    """Return a string consisting of the human-readable changeset rev and the changeset revision string."""
+ repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+ else:
+ return "-1:%s" % changeset_revision
def get_sample_files_from_disk( repository_files_dir, tool_path=None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
if resetting_all_metadata_on_repository:
# Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
@@ -1877,6 +2090,15 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_rev_label_from_changeset_revision( repo, changeset_revision ):
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ rev = '%04d' % ctx.rev()
+ label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+ else:
+ rev = '-1'
+ label = "-1:%s" % changeset_revision
+ return rev, label
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""
Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
@@ -2004,8 +2226,11 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
+def get_user( trans, id ):
+ """Get a user from the database by id."""
+ return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
def get_user_by_username( trans, username ):
- """Get a user from the database by username"""
+ """Get a user from the database by username."""
return trans.sa_session.query( trans.model.User ) \
.filter( trans.model.User.table.c.username == username ) \
.one()
@@ -2044,6 +2269,94 @@
all_repository_dependencies=all_repository_dependencies,
handled_key_rd_dicts=handled_key_rd_dicts,
circular_repository_dependencies=circular_repository_dependencies )
+def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
+ # There are 2 complementary features that enable a tool shed user to receive email notification:
+ # 1. Within User Preferences, they can elect to receive email when the first (or first valid)
+ # change set is produced for a new repository.
+ # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
+ # which caused them to receive email alerts when updates to the repository occur. This same feature
+ # is available on a per-repository basis on the repository grid within the tool shed.
+ #
+ # There are currently 4 scenarios for sending email notification when a change is made to a repository:
+ # 1. An admin user elects to receive email when the first change set is produced for a new repository
+ # from User Preferences. The change set does not have to include any valid content. This allows for
+ # the capture of inappropriate content being uploaded to new repositories.
+ # 2. A regular user elects to receive email when the first valid change set is produced for a new repository
+ # from User Preferences. This differs from 1 above in that the user will not receive email until a
+#      change set that includes valid content is produced.
+ # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the
+ # user is an admin user, the email will include information about both HTML and image content that was
+ # included in the change set.
+ # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the
+ # user is not an admin user, the email will not include any information about both HTML and image content
+ # that was included in the change set.
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ smtp_server = trans.app.config.smtp_server
+ if smtp_server and ( new_repo_alert or repository.email_alerts ):
+ # Send email alert to users that want them.
+ if trans.app.config.email_from is not None:
+ email_from = trans.app.config.email_from
+ elif trans.request.host.split( ':' )[0] == 'localhost':
+ email_from = 'galaxy-no-reply@' + socket.getfqdn()
+ else:
+ email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0]
+ tip_changeset = repo.changelog.tip()
+ ctx = repo.changectx( tip_changeset )
+ t, tz = ctx.date()
+ date = datetime( *gmtime( float( t ) - tz )[:6] )
+ display_date = date.strftime( "%Y-%m-%d" )
+ try:
+ username = ctx.user().split()[0]
+ except:
+ username = ctx.user()
+ # We'll use 2 template bodies because we only want to send content
+ # alerts to tool shed admin users.
+ if new_repo_alert:
+ template = new_repo_email_alert_template
+ else:
+ template = email_alert_template
+ admin_body = string.Template( template ).safe_substitute( host=trans.request.host,
+ repository_name=repository.name,
+ revision='%s:%s' %( str( ctx.rev() ), ctx ),
+ display_date=display_date,
+ description=ctx.description(),
+ username=username,
+ content_alert_str=content_alert_str )
+ body = string.Template( template ).safe_substitute( host=trans.request.host,
+ repository_name=repository.name,
+ revision='%s:%s' %( str( ctx.rev() ), ctx ),
+ display_date=display_date,
+ description=ctx.description(),
+ username=username,
+ content_alert_str='' )
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ frm = email_from
+ if new_repo_alert:
+ subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name )
+ subject = subject[ :80 ]
+ email_alerts = []
+ for user in trans.sa_session.query( trans.model.User ) \
+ .filter( and_( trans.model.User.table.c.deleted == False,
+ trans.model.User.table.c.new_repo_alert == True ) ):
+ if admin_only:
+ if user.email in admin_users:
+ email_alerts.append( user.email )
+ else:
+ email_alerts.append( user.email )
+ else:
+ subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name )
+ email_alerts = json.from_json_string( repository.email_alerts )
+ for email in email_alerts:
+ to = email.strip()
+ # Send it
+ try:
+ if to in admin_users:
+ util.send_mail( frm, to, subject, admin_body, trans.app.config )
+ else:
+ util.send_mail( frm, to, subject, body, trans.app.config )
+ except Exception, e:
+ log.exception( "An error occurred sending a tool shed repository update alert by email." )
def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
"""
This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
@@ -2160,6 +2473,15 @@
message = str( e )
error = True
return error, message
+def has_previous_repository_reviews( trans, repository, changeset_revision ):
+ """Determine if a repository has a changeset revision review prior to the received changeset revision."""
+ repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+ for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ previous_changeset_revision = str( repo.changectx( changeset ) )
+ if previous_changeset_revision in reviewed_revision_hashes:
+ return True
+ return False
def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ):
    """Return True if { repository_key : repository_dependency } is in all_repository_dependencies."""
for key, val in all_repository_dependencies.items():
@@ -2348,6 +2670,89 @@
containers_dict[ 'tool_dependencies' ] = root_container
containers_dict[ 'missing_tool_dependencies' ] = None
return containers_dict
+def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
+ """
+ Compare the last saved metadata for each repository dependency in the repository with the new metadata in metadata_dict to determine if a new
+ repository_metadata table record is required or if the last saved metadata record can be updated instead.
+ """
+ if 'repository_dependencies' in metadata_dict:
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'repository_dependencies' in metadata:
+ saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ # The saved metadata must be a subset of the new metadata.
+ for new_repository_dependency_metadata in new_repository_dependencies:
+ if new_repository_dependency_metadata not in saved_repository_dependencies:
+ return True
+ for saved_repository_dependency_metadata in saved_repository_dependencies:
+ if saved_repository_dependency_metadata not in new_repository_dependencies:
+ return True
+ else:
+ # We have repository metadata that does not include metadata for any repository dependencies in the
+ # repository, so we can update the existing repository metadata.
+ return False
+ else:
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
+ return True
+ # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed.
+ return False
+def new_tool_metadata_required( trans, repository, metadata_dict ):
+ """
+ Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata
+ table record is required, or if the last saved metadata record can be updated instead.
+ """
+ if 'tools' in metadata_dict:
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tools' in metadata:
+ saved_tool_ids = []
+ # The metadata for one or more tools was successfully generated in the past
+ # for this repository, so we first compare the version string for each tool id
+ # in metadata_dict with what was previously saved to see if we need to create
+ # a new table record or if we can simply update the existing record.
+ for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
+ for saved_tool_metadata_dict in metadata[ 'tools' ]:
+ if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
+ saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
+ if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]:
+ if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
+ return True
+ # So far, a new metadata record is not required, but we still have to check to see if
+ # any new tool ids exist in metadata_dict that are not in the saved metadata. We do
+ # this because if a new tarball was uploaded to a repository that included tools, it
+ # may have removed existing tool files if they were not included in the uploaded tarball.
+ for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
+ if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
+ return True
+ else:
+ # We have repository metadata that does not include metadata for any tools in the
+ # repository, so we can update the existing repository metadata.
+ return False
+ else:
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
+ return True
+ # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed.
+ return False
+def new_workflow_metadata_required( trans, repository, metadata_dict ):
+ """
+ Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of
+ exported workflows. If this changes at some future time, this method should be enhanced accordingly.
+ """
+ if 'workflows' in metadata_dict:
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ if repository_metadata:
+ # The repository has metadata, so update the workflows value - no new record is needed.
+ return False
+ else:
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
+ return True
+ # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
+ return False
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
@@ -2701,6 +3106,77 @@
return reversed_changelog
def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
+ """
+ Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
+ has problems.
+ """
+ message = ''
+ status = 'done'
+ encoded_id = trans.security.encode_id( repository.id )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
+ if metadata_dict:
+ downloadable = is_downloadable( metadata_dict )
+ repository_metadata = None
+ if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \
+ new_tool_metadata_required( trans, repository, metadata_dict ) or \
+ new_workflow_metadata_required( trans, repository, metadata_dict ):
+ # Create a new repository_metadata table row.
+ repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict )
+ # If this is the first record stored for this repository, see if we need to send any email alerts.
+ if len( repository.downloadable_revisions ) == 1:
+ handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
+ else:
+ repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ if repository_metadata:
+ downloadable = is_downloadable( metadata_dict )
+ # Update the last saved repository_metadata table row.
+ repository_metadata.changeset_revision = repository.tip( trans.app )
+ repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ else:
+ # There are no tools in the repository, and we're setting metadata on the repository tip.
+ repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict )
+ if 'tools' in metadata_dict and repository_metadata and status != 'error':
+ # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
+ changeset_revisions.append( changeset_revision )
+ add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
+ elif len( repo ) == 1 and not invalid_file_tups:
+ message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) )
+ message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
+ status = "error"
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ status = 'error'
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return message, status
+def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
+ # Set metadata on the repository tip.
+ error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
+ if error_message:
+ # If there is an error, display it.
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=trans.security.encode_id( repository.id ),
+ message=error_message,
+ status='error' ) )
def strip_path( fpath ):
if not fpath:
return fpath
@@ -2862,10 +3338,8 @@
# ? = not tracked
# I = ignored
# It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
- # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
- commands.update( get_configured_ui(),
- repo,
- rev=ctx_rev )
+ # purging is not supported by the mercurial API.
+ commands.update( get_configured_ui(), repo, rev=ctx_rev )
def url_join( *args ):
parts = []
for arg in args:
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -6,7 +6,6 @@
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
import galaxy.util.shed_util_common as suc
-import common
from repository import RepositoryGrid, CategoryGrid
from galaxy import eggs
@@ -474,7 +473,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = common.get_user( trans, kwd[ 'user_id' ] )
+ user = suc.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -489,7 +488,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if kwd[ 'id' ]:
@@ -554,7 +553,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif common.get_category_by_name( trans, name ):
+ elif suc.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -641,7 +640,7 @@
action='manage_categories',
message=message,
status='error' ) )
- category = common.get_category( trans, id )
+ category = suc.get_category( trans, id )
if params.get( 'edit_category_button', False ):
new_name = util.restore_text( params.get( 'name', '' ) ).strip()
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
@@ -649,7 +648,7 @@
if not new_name:
message = 'Enter a valid name'
status = 'error'
- elif category.name != new_name and common.get_category_by_name( trans, name ):
+ elif category.name != new_name and suc.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -772,7 +771,7 @@
ids = util.listify( id )
message = "Deleted %d categories: " % len( ids )
for category_id in ids:
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
category.deleted = True
trans.sa_session.add( category )
trans.sa_session.flush()
@@ -800,7 +799,7 @@
purged_categories = ""
message = "Purged %d categories: " % len( ids )
for category_id in ids:
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
if category.deleted:
# Delete RepositoryCategoryAssociations
for rca in category.repositories:
@@ -827,7 +826,7 @@
count = 0
undeleted_categories = ""
for category_id in ids:
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
if category.deleted:
category.deleted = False
trans.sa_session.add( category )
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,13 +1,4 @@
-import os, string, socket, logging, simplejson, binascii, tempfile
-from time import gmtime, strftime
-from datetime import *
-from galaxy.tools import *
-from galaxy.util.odict import odict
-from galaxy.util.json import from_json_string, to_json_string
-import galaxy.util.shed_util_common as suc
-from galaxy.web.base.controllers.admin import *
-from galaxy.webapps.community import model
-from galaxy.model.orm import and_
+import logging
from galaxy.model.item_attrs import UsesItemRatings
from galaxy import eggs
@@ -16,63 +7,6 @@
log = logging.getLogger( __name__ )
-new_repo_email_alert_template = """
-Repository name: ${repository_name}
-Revision: ${revision}
-Change description:
-${description}
-
-Uploaded by: ${username}
-Date content uploaded: ${display_date}
-
-${content_alert_str}
-
------------------------------------------------------------------------------
-This change alert was sent from the Galaxy tool shed hosted on the server
-"${host}"
------------------------------------------------------------------------------
-You received this alert because you registered to receive email when
-new repositories were created in the Galaxy tool shed named "${host}".
------------------------------------------------------------------------------
-"""
-
-email_alert_template = """
-Repository name: ${repository_name}
-Revision: ${revision}
-Change description:
-${description}
-
-Changed by: ${username}
-Date of change: ${display_date}
-
-${content_alert_str}
-
------------------------------------------------------------------------------
-This change alert was sent from the Galaxy tool shed hosted on the server
-"${host}"
------------------------------------------------------------------------------
-You received this alert because you registered to receive email whenever
-changes were made to the repository named "${repository_name}".
------------------------------------------------------------------------------
-"""
-
-contact_owner_template = """
-GALAXY TOOL SHED REPOSITORY MESSAGE
-------------------------
-
-The user '${username}' sent you the following message regarding your tool shed
-repository named '${repository_name}'. You can respond by sending a reply to
-the user's email address: ${email}.
------------------------------------------------------------------------------
-${message}
------------------------------------------------------------------------------
-This message was sent from the Galaxy Tool Shed instance hosted on the server
-'${host}'
-"""
-
-malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
-malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
-
class ItemRatings( UsesItemRatings ):
"""Overrides rate_item method since we also allow for comments"""
def rate_item( self, trans, user, item, rating, comment='' ):
@@ -95,503 +29,3 @@
trans.sa_session.add( item_rating )
trans.sa_session.flush()
return item_rating
-
-def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
- # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
- metadata = repository_metadata.metadata
- tool_versions_dict = {}
- for tool_dict in metadata.get( 'tools', [] ):
- # We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = suc.get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
-def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
- """Check the malicious flag in repository metadata for a specified change set"""
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- return repository_metadata.malicious
- return False
-def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
- """Determine if the current changeset revision has been reviewed by the current user."""
- for review in repository.reviews:
- if review.changeset_revision == changeset_revision and review.user == user:
- return True
- return False
-def check_file_contents( trans ):
- # See if any admin users have chosen to receive email alerts when a repository is updated.
- # If so, the file contents of the update must be checked for inappropriate content.
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.email_alerts != None ):
- email_alerts = from_json_string( repository.email_alerts )
- for user_email in email_alerts:
- if user_email in admin_users:
- return True
- return False
-def get_category( trans, id ):
- """Get a category from the database"""
- return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
-def get_category_by_name( trans, name ):
- """Get a category from the database via name"""
- try:
- return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one()
- except sqlalchemy.orm.exc.NoResultFound:
- return None
-def get_categories( trans ):
- """Get all categories from the database"""
- return trans.sa_session.query( trans.model.Category ) \
- .filter( trans.model.Category.table.c.deleted==False ) \
- .order_by( trans.model.Category.table.c.name ) \
- .all()
-def get_component( trans, id ):
- """Get a component from the database"""
- return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) )
-def get_component_by_name( trans, name ):
- return trans.sa_session.query( trans.app.model.Component ) \
- .filter( trans.app.model.Component.table.c.name==name ) \
- .first()
-def get_component_review( trans, id ):
- """Get a component_review from the database"""
- return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) )
-def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ):
- """Get a component_review from the database via repository_review_id and component_id"""
- return trans.sa_session.query( trans.model.ComponentReview ) \
- .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
- trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
- .first()
-def get_components( trans ):
- return trans.sa_session.query( trans.app.model.Component ) \
- .order_by( trans.app.model.Component.name ) \
- .all()
-def get_latest_repository_metadata( trans, decoded_repository_id ):
- """Get last metadata defined for a specified repository from the database"""
- return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \
- .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
- .first()
-def get_previous_repository_reviews( trans, repository, changeset_revision ):
- """Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
- repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- previous_reviews_dict = odict()
- for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
- previous_changeset_revision = str( repo.changectx( changeset ) )
- if previous_changeset_revision in reviewed_revision_hashes:
- previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
- revision_reviews = get_reviews_by_repository_id_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- previous_changeset_revision )
- previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
- reviews=revision_reviews )
- return previous_reviews_dict
-def get_repository_by_name( trans, name ):
- """Get a repository from the database via name"""
- return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
-def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
- repository_metadata_revisions = []
- metadata_changeset_revision_hashes = []
- if reviewed:
- for metadata_revision in repository.metadata_revisions:
- metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision )
- for review in repository.reviews:
- if review.changeset_revision in metadata_changeset_revision_hashes:
- rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ]
- if review.changeset_revision not in rmcr_hashes:
- repository_metadata_revisions.append( review.repository_metadata )
- else:
- for review in repository.reviews:
- if review.changeset_revision not in metadata_changeset_revision_hashes:
- metadata_changeset_revision_hashes.append( review.changeset_revision )
- for metadata_revision in repository.metadata_revisions:
- if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes:
- repository_metadata_revisions.append( metadata_revision )
- return repository_metadata_revisions
-def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
- if repository is None:
- repository = repository_metadata.repository
- repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- changeset_revision = repository_metadata.changeset_revision
- ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- rev = '%04d' % ctx.rev()
- label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
- else:
- rev = '-1'
- label = "-1:%s" % changeset_revision
- return rev, label, changeset_revision
-def get_rev_label_from_changeset_revision( repo, changeset_revision ):
- ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- rev = '%04d' % ctx.rev()
- label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
- else:
- rev = '-1'
- label = "-1:%s" % changeset_revision
- return rev, label
-def get_reversed_changelog_changesets( repo ):
- reversed_changelog = []
- for changeset in repo.changelog:
- reversed_changelog.insert( 0, changeset )
- return reversed_changelog
-def get_review( trans, id ):
- """Get a repository_review from the database via id"""
- return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
-def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
- """Get a repository_review from the database via repository id, changeset_revision and user_id"""
- return trans.sa_session.query( trans.model.RepositoryReview ) \
- .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
- trans.model.RepositoryReview.changeset_revision == changeset_revision,
- trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
- .first()
-def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
- """Get all repository_reviews from the database via repository id and changeset_revision"""
- return trans.sa_session.query( trans.model.RepositoryReview ) \
- .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
- trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
- .all()
-def get_revision_label( trans, repository, changeset_revision ):
- """
- Return a string consisting of the human read-able
- changeset rev and the changeset revision string.
- """
- repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
- else:
- return "-1:%s" % changeset_revision
-def get_user( trans, id ):
- """Get a user from the database by id"""
- return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
-def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
- # There are 2 complementary features that enable a tool shed user to receive email notification:
- # 1. Within User Preferences, they can elect to receive email when the first (or first valid)
- # change set is produced for a new repository.
- # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
- # which caused them to receive email alerts when updates to the repository occur. This same feature
- # is available on a per-repository basis on the repository grid within the tool shed.
- #
- # There are currently 4 scenarios for sending email notification when a change is made to a repository:
- # 1. An admin user elects to receive email when the first change set is produced for a new repository
- # from User Preferences. The change set does not have to include any valid content. This allows for
- # the capture of inappropriate content being uploaded to new repositories.
- # 2. A regular user elects to receive email when the first valid change set is produced for a new repository
- # from User Preferences. This differs from 1 above in that the user will not receive email until a
- # change set tha tincludes valid content is produced.
- # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the
- # user is an admin user, the email will include information about both HTML and image content that was
- # included in the change set.
- # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the
- # user is not an admin user, the email will not include any information about both HTML and image content
- # that was included in the change set.
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- smtp_server = trans.app.config.smtp_server
- if smtp_server and ( new_repo_alert or repository.email_alerts ):
- # Send email alert to users that want them.
- if trans.app.config.email_from is not None:
- email_from = trans.app.config.email_from
- elif trans.request.host.split( ':' )[0] == 'localhost':
- email_from = 'galaxy-no-reply@' + socket.getfqdn()
- else:
- email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0]
- tip_changeset = repo.changelog.tip()
- ctx = repo.changectx( tip_changeset )
- t, tz = ctx.date()
- date = datetime( *gmtime( float( t ) - tz )[:6] )
- display_date = date.strftime( "%Y-%m-%d" )
- try:
- username = ctx.user().split()[0]
- except:
- username = ctx.user()
- # We'll use 2 template bodies because we only want to send content
- # alerts to tool shed admin users.
- if new_repo_alert:
- template = new_repo_email_alert_template
- else:
- template = email_alert_template
- admin_body = string.Template( template ).safe_substitute( host=trans.request.host,
- repository_name=repository.name,
- revision='%s:%s' %( str( ctx.rev() ), ctx ),
- display_date=display_date,
- description=ctx.description(),
- username=username,
- content_alert_str=content_alert_str )
- body = string.Template( template ).safe_substitute( host=trans.request.host,
- repository_name=repository.name,
- revision='%s:%s' %( str( ctx.rev() ), ctx ),
- display_date=display_date,
- description=ctx.description(),
- username=username,
- content_alert_str='' )
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- frm = email_from
- if new_repo_alert:
- subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name )
- subject = subject[ :80 ]
- email_alerts = []
- for user in trans.sa_session.query( trans.model.User ) \
- .filter( and_( trans.model.User.table.c.deleted == False,
- trans.model.User.table.c.new_repo_alert == True ) ):
- if admin_only:
- if user.email in admin_users:
- email_alerts.append( user.email )
- else:
- email_alerts.append( user.email )
- else:
- subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name )
- email_alerts = from_json_string( repository.email_alerts )
- for email in email_alerts:
- to = email.strip()
- # Send it
- try:
- if to in admin_users:
- util.send_mail( frm, to, subject, admin_body, trans.app.config )
- else:
- util.send_mail( frm, to, subject, body, trans.app.config )
- except Exception, e:
- log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def has_previous_repository_reviews( trans, repository, changeset_revision ):
- """Determine if a repository has a changeset revision review prior to the received changeset revision."""
- repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
- previous_changeset_revision = str( repo.changectx( changeset ) )
- if previous_changeset_revision in reviewed_revision_hashes:
- return True
- return False
-def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
- """
- Compare the last saved metadata for each repository dependency in the repository with the new
- metadata in metadata_dict to determine if a new repository_metadata table record is required,
- or if the last saved metadata record can be updated instead.
- """
- if 'repository_dependencies' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- if 'repository_dependencies' in metadata:
- saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
- new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
- # The saved metadata must be a subset of the new metadata.
- for new_repository_dependency_metadata in new_repository_dependencies:
- if new_repository_dependency_metadata not in saved_repository_dependencies:
- return True
- for saved_repository_dependency_metadata in saved_repository_dependencies:
- if saved_repository_dependency_metadata not in new_repository_dependencies:
- return True
- else:
- # We have repository metadata that does not include metadata for any repository dependencies in the
- # repository, so we can update the existing repository metadata.
- return False
- else:
- # There is no saved repository metadata, so we need to create a new repository_metadata table record.
- return True
- # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed.
- return False
-def new_tool_metadata_required( trans, repository, metadata_dict ):
- """
- Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata
- table record is required, or if the last saved metadata record can be updated instead.
- """
- if 'tools' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- if 'tools' in metadata:
- saved_tool_ids = []
- # The metadata for one or more tools was successfully generated in the past
- # for this repository, so we first compare the version string for each tool id
- # in metadata_dict with what was previously saved to see if we need to create
- # a new table record or if we can simply update the existing record.
- for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
- for saved_tool_metadata_dict in metadata[ 'tools' ]:
- if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
- saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
- if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]:
- if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
- return True
- # So far, a new metadata record is not required, but we still have to check to see if
- # any new tool ids exist in metadata_dict that are not in the saved metadata. We do
- # this because if a new tarball was uploaded to a repository that included tools, it
- # may have removed existing tool files if they were not included in the uploaded tarball.
- for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
- if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
- return True
- else:
- # We have repository metadata that does not include metadata for any tools in the
- # repository, so we can update the existing repository metadata.
- return False
- else:
- # There is no saved repository metadata, so we need to create a new repository_metadata table record.
- return True
- # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed.
- return False
-def new_workflow_metadata_required( trans, repository, metadata_dict ):
- """
- Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of
- exported workflows. If this changes at some future time, this method should be enhanced accordingly.
- """
- if 'workflows' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
- if repository_metadata:
- # The repository has metadata, so update the workflows value - no new record is needed.
- return False
- else:
- # There is no saved repository metadata, so we need to create a new repository_metadata table record.
- return True
- # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
- return False
-def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
- """
- Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
- has problems.
- """
- message = ''
- status = 'done'
- encoded_id = trans.security.encode_id( repository.id )
- repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
- if metadata_dict:
- downloadable = suc.is_downloadable( metadata_dict )
- repository_metadata = None
- if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \
- new_tool_metadata_required( trans, repository, metadata_dict ) or \
- new_workflow_metadata_required( trans, repository, metadata_dict ):
- # Create a new repository_metadata table row.
- repository_metadata = suc.create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
- # If this is the first record stored for this repository, see if we need to send any email alerts.
- if len( repository.downloadable_revisions ) == 1:
- handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
- else:
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
- if repository_metadata:
- downloadable = suc.is_downloadable( metadata_dict )
- # Update the last saved repository_metadata table row.
- repository_metadata.changeset_revision = repository.tip( trans.app )
- repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = downloadable
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- else:
- # There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = suc.create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
- if 'tools' in metadata_dict and repository_metadata and status != 'error':
- # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
- changeset_revisions = []
- for changeset in repo.changelog:
- changeset_revision = str( repo.changectx( changeset ) )
- if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
- changeset_revisions.append( changeset_revision )
- add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
- elif len( repo ) == 1 and not invalid_file_tups:
- message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) )
- message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
- status = "error"
- if invalid_file_tups:
- message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
- status = 'error'
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
- return message, status
-def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
- # Set metadata on the repository tip.
- error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
- if error_message:
- # If there is an error, display it.
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='manage_repository',
- id=trans.security.encode_id( repository.id ),
- message=error_message,
- status='error' ) )
-def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
- # This method id deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
- # is not supported by the mercurial API.
- # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
- # added, modified or removed files that have not yet been committed.
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- # The following will delete the disk copy of only the files in the repository.
- #os.system( 'hg update -r null > /dev/null 2>&1' )
- files_to_remove_from_disk = []
- files_to_commit = []
- # We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- # The codes used to show the status of files are as follows.
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- # We'll use mercurial's purge extension to remove untracked file. Using this extension requires the
- # following entry in the repository's hgrc file which was not required for some time, so we'll add it
- # if it's missing.
- # [extensions]
- # hgext.purge=
- lines = repo.opener( 'hgrc', 'rb' ).readlines()
- if not '[extensions]\n' in lines:
- # No extensions have been added at all, so just append to the file.
- fp = repo.opener( 'hgrc', 'a' )
- fp.write( '[extensions]\n' )
- fp.write( 'hgext.purge=\n' )
- fp.close()
- elif not 'hgext.purge=\n' in lines:
- # The file includes and [extensions] section, but we need to add the
- # purge extension.
- fp = repo.opener( 'hgrc', 'wb' )
- for line in lines:
- if line.startswith( '[extensions]' ):
- fp.write( line )
- fp.write( 'hgext.purge=\n' )
- else:
- fp.write( line )
- fp.close()
- cmd = 'hg purge'
- os.chdir( repo_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
- return_code = proc.wait()
- os.chdir( current_working_dir )
- if return_code != 0:
- output = proc.stdout.read( 32768 )
- log.debug( 'hg purge failed in repository directory %s, reason: %s' % ( repo_dir, output ) )
- if files_to_commit:
- if not commit_message:
- commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
- repo.dirstate.write()
- repo.commit( user=trans.user.username, text=commit_message )
- cmd = 'hg update > /dev/null 2>&1'
- os.chdir( repo_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
- return_code = proc.wait()
- os.chdir( current_working_dir )
- if return_code != 0:
- output = proc.stdout.read( 32768 )
- log.debug( 'hg update > /dev/null 2>&1 failed in repository directory %s, reason: %s' % ( repo_dir, output ) )
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -1,7 +1,6 @@
import os, logging
from galaxy.web.base.controller import *
-from galaxy.util.shed_util_common import get_repository_by_name_and_owner
-from galaxy.webapps.community.controllers.common import set_repository_metadata
+from galaxy.util.shed_util_common import get_repository_by_name_and_owner, set_repository_metadata
from galaxy import eggs
eggs.require('mercurial')
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -25,6 +25,8 @@
log = logging.getLogger( __name__ )
VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" )
+malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
+malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
class CategoryGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
@@ -540,7 +542,7 @@
# The value of 'id' has been set to the search string, which is a repository name. We'll try to get the desired encoded repository
# id to pass on.
try:
- repository = common.get_repository_by_name( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_by_name( trans, kwd[ 'id' ] )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -615,7 +617,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = common.get_user( trans, kwd[ 'user_id' ] )
+ user = suc.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -655,7 +657,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if trans.user:
@@ -696,7 +698,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
suc.update_repository( repo )
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repository=repository,
@@ -722,7 +724,7 @@
# We'll try to get the desired encoded repository id to pass on.
try:
name = kwd[ 'id' ]
- repository = common.get_repository_by_name( trans, name )
+ repository = suc.get_repository_by_name( trans, name )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -745,7 +747,7 @@
if 'f-Category.name' in kwd:
# The user browsed to a category and then entered a search string, so get the category associated with it's value.
category_name = kwd[ 'f-Category.name' ]
- category = common.get_category_by_name( trans, category_name )
+ category = suc.get_category_by_name( trans, category_name )
# Set the id value in kwd since it is required by the ValidRepositoryGrid.build_initial_query method.
kwd[ 'id' ] = trans.security.encode_id( category.id )
if galaxy_url:
@@ -755,7 +757,7 @@
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_metadata = common.get_latest_repository_metadata( trans, repository.id )
+ repository_metadata = suc.get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -767,7 +769,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = common.get_category( trans, category_id )
+ category = suc.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
# The changeset_revision_select_field in the ValidRepositoryGrid performs a refresh_on_change which sends in request parameters like
# changeset_revison_1, changeset_revision_2, etc. One of the many select fields on the grid performed the refresh_on_change, so we loop
@@ -927,7 +929,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- categories = common.get_categories( trans )
+ categories = suc.get_categories( trans )
if not categories:
message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
'An administrator needs to create some via the Administrator control panel before creating repositories.',
@@ -1028,7 +1030,7 @@
if message:
status = 'error'
tool_state = self.__new_state( trans )
- is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, repository_id, changeset_revision )
try:
return trans.fill_template( "/webapps/community/repository/tool_form.mako",
@@ -1685,7 +1687,7 @@
status = params.get( 'status', 'error' )
repository, tool, error_message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
- is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
invalid_file_tups = suc.check_tool_input_params( trans.app,
@@ -1883,7 +1885,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = common.get_revision_label( trans, repository, repository.tip( trans.app ) )
+ revision_label = suc.get_revision_label( trans, repository, repository.tip( trans.app ) )
repository_metadata = None
repository_metadata_id = None
metadata = None
@@ -1892,7 +1894,7 @@
if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
- revision_label = common.get_revision_label( trans, repository, changeset_revision )
+ revision_label = suc.get_revision_label( trans, repository, changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1902,7 +1904,7 @@
if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
if repository_metadata:
- revision_label = common.get_revision_label( trans, repository, previous_changeset_revision )
+ revision_label = suc.get_revision_label( trans, repository, previous_changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1917,20 +1919,20 @@
handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += common.malicious_error_can_push
+ message += malicious_error_can_push
else:
- message += common.malicious_error
+ message += malicious_error
status = 'error'
malicious_check_box = CheckboxField( 'malicious', checked=is_malicious )
- categories = common.get_categories( trans )
+ categories = suc.get_categories( trans )
selected_categories = [ rca.category_id for rca in repository.categories ]
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2031,7 +2033,7 @@
repository_metadata_id = None
metadata = None
repository_dependencies = None
- revision_label = common.get_revision_label( trans, repository, changeset_revision )
+ revision_label = suc.get_revision_label( trans, repository, changeset_revision )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
@@ -2103,7 +2105,7 @@
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
repository=repository,
@@ -2263,7 +2265,7 @@
if not commit_message:
commit_message = 'Deleted selected files'
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
- common.handle_email_alerts( trans, repository )
+ suc.handle_email_alerts( trans, repository )
# Update the repository files for browsing.
suc.update_repository( repo )
# Get the new repository tip.
@@ -2275,11 +2277,11 @@
else:
message += 'The selected files were deleted from the repository. '
kwd[ 'message' ] = message
- common.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
+ suc.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -2302,7 +2304,7 @@
# Get the name of the server hosting the tool shed instance.
host = trans.request.host
# Build the email message
- body = string.Template( common.contact_owner_template ) \
+ body = string.Template( suc.contact_owner_template ) \
.safe_substitute( username=trans.user.username,
repository_name=repository.name,
email=trans.user.email,
@@ -2434,7 +2436,7 @@
'has_metadata' : has_metadata }
# Make sure we'll view latest changeset first.
changesets.insert( 0, change_dict )
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/view_changelog.mako',
repository=repository,
@@ -2465,7 +2467,7 @@
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
diffs.append( suc.to_safe_string( diff, to_html=True ) )
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
repository=repository,
@@ -2535,7 +2537,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = common.get_revision_label( trans, repository, changeset_revision )
+ revision_label = suc.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -2551,20 +2553,20 @@
else:
repository_metadata_id = None
metadata = None
- is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += common.malicious_error_can_push
+ message += malicious_error_can_push
else:
- message += common.malicious_error
+ message += malicious_error
status = 'error'
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2601,7 +2603,7 @@
tool = None
guid = None
original_tool_data_path = trans.app.config.tool_data_path
- revision_label = common.get_revision_label( trans, repository, changeset_revision )
+ revision_label = suc.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
@@ -2636,19 +2638,19 @@
tool_lineage = self.get_versions_of_tool( trans, repository, repository_metadata, guid )
else:
metadata = None
- is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
trans.app.config.tool_data_path = original_tool_data_path
- reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = common.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2719,7 +2721,7 @@
# Restrict the options to all revisions that have associated metadata.
repository_metadata_revisions = repository.metadata_revisions
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
+ rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
changeset_tups.append( ( rev, label, changeset_revision ) )
refresh_on_change_values.append( changeset_revision )
# Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -59,7 +59,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for review in repository.reviews:
changeset_revision = review.changeset_revision
- rev, label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -67,13 +67,13 @@
class WithoutReviewsRevisionColumn( grids.GridColumn ):
def get_value( self, trans, grid, repository ):
# Restrict the options to revisions that have not yet been reviewed.
- repository_metadata_revisions = common.get_repository_metadata_revisions_for_review( repository, reviewed=False )
+ repository_metadata_revisions = suc.get_repository_metadata_revisions_for_review( repository, reviewed=False )
if repository_metadata_revisions:
rval = ''
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans,
- repository_metadata,
- repository=repository )
+ rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans,
+ repository_metadata,
+ repository=repository )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -177,7 +177,7 @@
rval += 'edit_review'
else:
rval +='browse_review'
- rval += '?id=%s">%s</a>' % ( encoded_review_id, common.get_revision_label( trans, review.repository, review.changeset_revision ) )
+ rval += '?id=%s">%s</a>' % ( encoded_review_id, suc.get_revision_label( trans, review.repository, review.changeset_revision ) )
return rval
class RatingColumn( grids.TextColumn ):
def get_value( self, trans, grid, review ):
@@ -277,7 +277,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
encoded_review_id = kwd[ 'id' ]
- review = common.get_review( trans, encoded_review_id )
+ review = suc.get_review( trans, encoded_review_id )
if kwd.get( 'approve_repository_review_button', False ):
approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
approved_select_field_value = str( kwd[ approved_select_field_name ] )
@@ -309,10 +309,10 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- review = common.get_review( trans, kwd[ 'id' ] )
+ review = suc.get_review( trans, kwd[ 'id' ] )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/browse_review.mako',
repository=repository,
changeset_revision_label=changeset_revision_label,
@@ -345,7 +345,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif common.get_component_by_name( trans, name ):
+ elif suc.get_component_by_name( trans, name ):
message = 'A component with that name already exists'
status = 'error'
else:
@@ -378,15 +378,15 @@
if changeset_revision:
# Make sure there is not already a review of the revision by the user.
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if common.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) ):
+ if suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) ):
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
# See if there are any reviews for previous changeset revisions that the user can copy.
- if not create_without_copying and not previous_review_id and common.has_previous_repository_reviews( trans, repository, changeset_revision ):
+ if not create_without_copying and not previous_review_id and suc.has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
action='select_previous_review',
**kwd ) )
@@ -404,7 +404,7 @@
trans.sa_session.add( review )
trans.sa_session.flush()
if previous_review_id:
- review_to_copy = common.get_review( trans, previous_review_id )
+ review_to_copy = suc.get_review( trans, previous_review_id )
self.copy_review( trans, review_to_copy, review )
review_id = trans.security.encode_id( review.id )
message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \
@@ -440,7 +440,7 @@
action='manage_categories',
message=message,
status='error' ) )
- component = common.get_component( trans, id )
+ component = suc.get_component( trans, id )
if params.get( 'edit_component_button', False ):
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
if component.description != new_description:
@@ -465,9 +465,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
review_id = kwd.get( 'id', None )
- review = common.get_review( trans, review_id )
+ review = suc.get_review( trans, review_id )
components_dict = odict()
- for component in common.get_components( trans ):
+ for component in suc.get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
@@ -517,8 +517,8 @@
approved = str( v )
elif component_review_attr == 'rating':
rating = int( str( v ) )
- component = common.get_component( trans, component_id )
- component_review = common.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
+ component = suc.get_component( trans, component_id )
+ component_review = suc.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
if component_review:
# See if the existing component review should be updated.
if component_review.comment != comment or \
@@ -572,7 +572,7 @@
name='revision_approved',
selected_value=selected_value,
for_component=False )
- rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/edit_review.mako',
repository=repository,
review=review,
@@ -659,14 +659,14 @@
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
reviews_dict = odict()
- for changeset in common.get_reversed_changelog_changesets( repo ):
+ for changeset in suc.get_reversed_changelog_changesets( repo ):
ctx = repo.changectx( changeset )
changeset_revision = str( ctx )
if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes:
- rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
if changeset_revision in reviewed_revision_hashes:
# Find the review for this changeset_revision
- repository_reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
@@ -704,8 +704,8 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
- rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
- reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/reviews_of_changeset_revision.mako',
repository=repository,
changeset_revision=changeset_revision,
@@ -724,7 +724,7 @@
if 'operation' in kwd:
operation = kwd['operation'].lower()
# The value of the received id is the encoded review id.
- review = common.get_review( trans, kwd[ 'id' ] )
+ review = suc.get_review( trans, kwd[ 'id' ] )
repository = review.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
if operation == "inspect repository revisions":
@@ -737,7 +737,7 @@
action='view_or_manage_repository',
**kwd ) )
# The user may not be the current user. The value of the received id is the encoded user id.
- user = common.get_user( trans, kwd[ 'id' ] )
+ user = suc.get_user( trans, kwd[ 'id' ] )
self.repository_reviews_by_user_grid.title = "All repository revision reviews for user '%s'" % user.username
return self.repository_reviews_by_user_grid( trans, **kwd )
@web.expose
@@ -768,8 +768,8 @@
repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- previous_reviews_dict = common.get_previous_repository_reviews( trans, repository, changeset_revision )
- rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ previous_reviews_dict = suc.get_previous_repository_reviews( trans, repository, changeset_revision )
+ rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako',
repository=repository,
changeset_revision=changeset_revision,
diff -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 -r dd395d9b8a01255412b3e56219d11639ccce2e50 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -1,7 +1,6 @@
import sys, os, shutil, logging, tarfile, tempfile, urllib
from galaxy.web.base.controller import *
from galaxy.datatypes import checkers
-import common
import galaxy.util.shed_util_common as suc
from galaxy import eggs
@@ -23,7 +22,7 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
category_ids = util.listify( params.get( 'category_id', '' ) )
- categories = common.get_categories( trans )
+ categories = suc.get_categories( trans )
repository_id = params.get( 'repository_id', '' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
@@ -37,7 +36,7 @@
url = params.get( 'url', '' )
# Part of the upload process is sending email notification to those that have registered to
# receive them. One scenario occurs when the first change set is produced for the repository.
- # See the common.handle_email_alerts() method for the definition of the scenarios.
+ # See the suc.handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new( trans.app )
uploaded_directory = None
if params.get( 'upload_button', False ):
@@ -104,22 +103,11 @@
# Uploaded directory
istar = False
if istar:
- ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_tar( trans,
- repository,
- tar,
- uploaded_file,
- upload_point,
- remove_repo_files_not_in_tar,
- commit_message,
- new_repo_alert )
+ ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+ self.upload_tar( trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
elif uploaded_directory:
- ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_directory( trans,
- repository,
- uploaded_directory,
- upload_point,
- remove_repo_files_not_in_tar,
- commit_message,
- new_repo_alert )
+ ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+ self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
else:
if ( isgzip or isbz2 ) and uncompress_file:
uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
@@ -131,7 +119,7 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = common.check_file_contents( trans )
+ check_contents = suc.check_file_contents( trans )
if check_contents and os.path.isfile( full_path ):
content_alert_str = self.__check_file_content( full_path )
else:
@@ -148,7 +136,7 @@
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
admin_only = len( repository.downloadable_revisions ) != 1
- common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
suc.update_repository( repo )
@@ -177,17 +165,20 @@
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
- common.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
- # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
+ suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
+ # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
+ # or some other problem.
if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
if repository.metadata_revisions:
+ # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
metadata_dict = repository.metadata_revisions[0].metadata
else:
metadata_dict = {}
if 'tool_dependencies' not in metadata_dict:
- message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file". '
+ message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", '
+ message += 'so the tool dependency definitions will be ignored.'
status = 'warning'
- log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
+ log.debug( 'Error in tool dependencies for repository with id %s and name %s: %s' % ( str( repository.id ), str( repository.name ), message ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
suc.reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
@@ -327,7 +318,7 @@
pass
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = common.check_file_contents( trans )
+ check_contents = suc.check_file_contents( trans )
for filename_in_archive in filenames_in_archive:
# Check file content to ensure it is appropriate.
if check_contents and os.path.isfile( filename_in_archive ):
@@ -341,7 +332,7 @@
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
admin_only = len( repository.downloadable_revisions ) != 1
- common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this notification because you have the service enabled and are the
addressed recipient of this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6538175fb3e6/
changeset: 6538175fb3e6
user: jgoecks
date: 2013-01-17 21:57:21
summary: Remove incorrect test parameter.
affected #: 1 file
diff -r d4a61389668164e833eef418ab03c1467ba64d13 -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -462,7 +462,6 @@
<param name="use_annotations" value="No" /><param name="use_juncs" value="No" /><param name="no_novel_juncs" value="No" />
- <param name="report_discordant_pairs" value="No" /><param name="use_search" value="Yes" /><param name="min_coverage_intron" value="50" /><param name="max_coverage_intron" value="20000" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this notification because you have the service enabled and are the
addressed recipient of this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/44d9d215b9c3/
changeset: 44d9d215b9c3
user: epaniagu
date: 2011-11-22 21:42:18
summary: fix add/remove buttons in Repeat elements
affected #: 1 file
diff -r 9d6a9963b0da21fe3139fcefbd11c1ec6290a529 -r 44d9d215b9c337cf9b988976bf93f429dbd84ccb templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -139,13 +139,17 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %if input.min < len( repeat_state ):
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %endif
</div>
%if rep_errors.has_key( '__index__' ):
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %if input.max > len( repeat_state ):
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %endif
</div>
%elif input.type == "conditional":
<%
https://bitbucket.org/galaxy/galaxy-central/commits/b6ff1a695e07/
changeset: b6ff1a695e07
user: epaniagu
date: 2011-11-22 21:53:56
summary: replace len( repeat_state ) with a variable so there's only one call
affected #: 1 file
diff -r 44d9d215b9c337cf9b988976bf93f429dbd84ccb -r b6ff1a695e07e6a2443a3d6d0aa9a9066a155dbc templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -127,8 +127,11 @@
</div>
%endif
</div>
- <% repeat_state = tool_state[input.name] %>
- %for i in range( len( repeat_state ) ):
+ <%
+ repeat_state = tool_state[input.name]
+ num_repeats = len( repeat_state )
+ %>
+ %for i in range( num_repeats ):
<div class="repeat-group-item"><%
if input.name in errors:
@@ -139,7 +142,7 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- %if input.min < len( repeat_state ):
+ %if input.min < num_repeats:
<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
%endif
</div>
@@ -147,7 +150,7 @@
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
- %if input.max > len( repeat_state ):
+ %if input.max > num_repeats:
<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
%endif
</div>
https://bitbucket.org/galaxy/galaxy-central/commits/8d068273cf5b/
changeset: 8d068273cf5b
user: jgoecks
date: 2013-01-17 21:32:34
summary: Merged in epaniagu/galaxy-central (pull request #24: Fix Add/Remove buttons for Repeat groups)
affected #: 1 file
diff -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 -r 8d068273cf5b1160a27977727a7ab6f2237d4bb7 templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -135,8 +135,11 @@
</div>
%endif
</div>
- <% repeat_state = tool_state[input.name] %>
- %for i in range( len( repeat_state ) ):
+ <%
+ repeat_state = tool_state[input.name]
+ num_repeats = len( repeat_state )
+ %>
+ %for i in range( num_repeats ):
<div class="repeat-group-item"><%
if input.name in errors:
@@ -147,13 +150,25 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+<<<<<<< local
<div class="form-row"><input type="submit" class="btn" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+=======
+ %if input.min < num_repeats:
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %endif
+>>>>>>> other
</div>
%if rep_errors.has_key( '__index__' ):
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
+<<<<<<< local
<div class="form-row"><input type="submit" class="btn" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+=======
+ %if input.max > num_repeats:
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %endif
+>>>>>>> other
</div>
%elif input.type == "conditional":
<%
https://bitbucket.org/galaxy/galaxy-central/commits/d4a613896681/
changeset: d4a613896681
user: jgoecks
date: 2013-01-17 21:34:04
summary: Automated merge.
affected #: 1 file
diff -r 8d068273cf5b1160a27977727a7ab6f2237d4bb7 -r d4a61389668164e833eef418ab03c1467ba64d13 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -50,6 +50,41 @@
migrated_tool_panel_config = 'migrated_tools_conf.xml'
installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
+# should this serve static resources (scripts, images, styles, etc.)
+STATIC_ENABLED = True
+
+def get_static_settings():
+ """Returns dictionary of the settings necessary for a galaxy App
+ to be wrapped in the static middleware.
+
+ This mainly consists of the filesystem locations of url-mapped
+ static resources.
+ """
+ cwd = os.getcwd()
+ static_dir = os.path.join( cwd, 'static' )
+ #TODO: these should be copied from universe_wsgi.ini
+ return dict(
+ #TODO: static_enabled needed here?
+ static_enabled = True,
+ static_cache_time = 360,
+ static_dir = static_dir,
+ static_images_dir = os.path.join( static_dir, 'images', '' ),
+ static_favicon_dir = os.path.join( static_dir, 'favicon.ico' ),
+ static_scripts_dir = os.path.join( static_dir, 'scripts', '' ),
+ static_style_dir = os.path.join( static_dir, 'june_2007_style', 'blue' ),
+ static_robots_txt = os.path.join( static_dir, 'robots.txt' ),
+ )
+
+def get_webapp_global_conf():
+ """Get the global_conf dictionary sent as the first argument to app_factory.
+ """
+ # (was originally sent 'dict()') - nothing here for now except static settings
+ global_conf = dict()
+ if STATIC_ENABLED:
+ global_conf.update( get_static_settings() )
+ return global_conf
+
+
def parse_tool_panel_config( config, shed_tools_dict ):
"""
Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
@@ -289,7 +324,8 @@
server = None
if start_server:
- webapp = buildapp.app_factory( dict(), use_translogger=False, static_enabled=False, app=app )
+ webapp = buildapp.app_factory( get_webapp_global_conf(), app=app,
+ use_translogger=False, static_enabled=STATIC_ENABLED )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this notification because you have the service enabled and are the
addressed recipient of this email.
1
0
commit/galaxy-central: carlfeberhard: Enable serving static files when running functional tests
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/da89c3556c27/
changeset: da89c3556c27
user: carlfeberhard
date: 2013-01-17 20:58:43
summary: Enable serving static files when running functional tests
affected #: 1 file
diff -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 -r da89c3556c27c8f1e5acf943c6983be5437efac6 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -50,6 +50,41 @@
migrated_tool_panel_config = 'migrated_tools_conf.xml'
installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
+# should this serve static resources (scripts, images, styles, etc.)
+STATIC_ENABLED = True
+
+def get_static_settings():
+ """Returns dictionary of the settings necessary for a galaxy App
+ to be wrapped in the static middleware.
+
+ This mainly consists of the filesystem locations of url-mapped
+ static resources.
+ """
+ cwd = os.getcwd()
+ static_dir = os.path.join( cwd, 'static' )
+ #TODO: these should be copied from universe_wsgi.ini
+ return dict(
+ #TODO: static_enabled needed here?
+ static_enabled = True,
+ static_cache_time = 360,
+ static_dir = static_dir,
+ static_images_dir = os.path.join( static_dir, 'images', '' ),
+ static_favicon_dir = os.path.join( static_dir, 'favicon.ico' ),
+ static_scripts_dir = os.path.join( static_dir, 'scripts', '' ),
+ static_style_dir = os.path.join( static_dir, 'june_2007_style', 'blue' ),
+ static_robots_txt = os.path.join( static_dir, 'robots.txt' ),
+ )
+
+def get_webapp_global_conf():
+ """Get the global_conf dictionary sent as the first argument to app_factory.
+ """
+ # (was originally sent 'dict()') - nothing here for now except static settings
+ global_conf = dict()
+ if STATIC_ENABLED:
+ global_conf.update( get_static_settings() )
+ return global_conf
+
+
def parse_tool_panel_config( config, shed_tools_dict ):
"""
Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
@@ -289,7 +324,8 @@
server = None
if start_server:
- webapp = buildapp.app_factory( dict(), use_translogger=False, static_enabled=False, app=app )
+ webapp = buildapp.app_factory( get_webapp_global_conf(), app=app,
+ use_translogger=False, static_enabled=STATIC_ENABLED )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this notification because you have the service enabled and are the
addressed recipient of this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2eb6dddb3866/
changeset: 2eb6dddb3866
user: fangly
date: 2011-10-05 10:04:58
summary: Paired-end code that properly ignores description part of FASTQ headers
affected #: 1 file
diff -r 087a766b3eca312d49caffa6b821d304658825ae -r 2eb6dddb3866adef30b72e92e747d9ece4e11da9 lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -514,9 +514,13 @@
self.apply_galaxy_conventions = apply_galaxy_conventions
def close( self ):
return self.file.close()
- def get( self, sequence_id ):
- if not isinstance( sequence_id, basestring ):
- sequence_id = sequence_id.identifier
+ def get( self, sequence_identifier ):
+ # Input is either a sequence ID or a sequence object
+ if not isinstance( sequence_identifier, basestring ):
+ # Input was a sequence object (not a sequence ID). Get the sequence ID
+ sequence_identifier = sequence_identifier.identifier
+ # Get only the ID part of the sequence header
+ sequence_id, sequence_sep, sequence_desc = sequence_identifier.partition(' ')
rval = None
if sequence_id in self.offset_dict:
initial_offset = self.file.tell()
@@ -525,7 +529,7 @@
del self.offset_dict[ sequence_id ]
self.file.seek( seq_offset )
rval = self.reader.next()
- #assert rval.identifier == sequence_id, 'seq id mismatch' #should be able to remove this
+ #assert rval.id == sequence_id, 'seq id mismatch' #should be able to remove this
self.file.seek( initial_offset )
else:
while True:
@@ -535,13 +539,14 @@
except StopIteration:
self.eof = True
break #eof, id not found, will return None
- if fastq_read.identifier == sequence_id:
+ fastq_read_id, fastq_read_sep, fastq_read_desc = fastq_read.identifier.partition(' ')
+ if fastq_read_id == sequence_id:
rval = fastq_read
break
else:
- if fastq_read.identifier not in self.offset_dict:
- self.offset_dict[ fastq_read.identifier ] = []
- self.offset_dict[ fastq_read.identifier ].append( offset )
+ if fastq_read_id not in self.offset_dict:
+ self.offset_dict[ fastq_read_id ] = []
+ self.offset_dict[ fastq_read_id ].append( offset )
if rval is not None and self.apply_galaxy_conventions:
rval.apply_galaxy_conventions()
return rval
@@ -582,16 +587,18 @@
self.format = format
self.force_quality_encoding = force_quality_encoding
def join( self, read1, read2 ):
- if read1.identifier.endswith( '/2' ) and read2.identifier.endswith( '/1' ):
+ read1_id, read1_sep, read1_desc = read1.identifier.partition(' ')
+ read2_id, read2_sep, read2_desc = read2.identifier.partition(' ')
+ if read1_id.endswith( '/2' ) and read2_id.endswith( '/1' ):
#swap 1 and 2
tmp = read1
read1 = read2
read2 = tmp
del tmp
- if read1.identifier.endswith( '/1' ) and read2.identifier.endswith( '/2' ):
- identifier = read1.identifier[:-2]
- else:
- identifier = read1.identifier
+ if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
+ read1_id = read1_id[:-2]
+
+ identifier = read1_id + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
@@ -621,17 +628,18 @@
rval.quality = "%s %s" % ( new_read1.quality.strip(), new_read2.quality.strip() )
return rval
def get_paired_identifier( self, fastq_read ):
- identifier = fastq_read.identifier
- if identifier[-2] == '/':
- if identifier[-1] == "1":
- identifier = "%s2" % identifier[:-1]
- elif identifier[-1] == "2":
- identifier = "%s1" % identifier[:-1]
- return identifier
+ read_id, read_sep, read_desc = fastq_read.identifier.partition(' ')
+ if read_id[-2] == '/':
+ if read_id[-1] == "1":
+ read_id = "%s2" % read_id[:-1]
+ elif read_id[-1] == "2":
+ read_id = "%s1" % read_id[:-1]
+ return read_id
def is_first_mate( self, sequence_id ):
is_first = None
if not isinstance( sequence_id, basestring ):
sequence_id = sequence_id.identifier
+ sequence_id, sequence_sep, sequence_desc = sequence_id.partition(' ')
if sequence_id[-2] == '/':
if sequence_id[-1] == "1":
is_first = True
https://bitbucket.org/galaxy/galaxy-central/commits/34e7cf3bcef0/
changeset: 34e7cf3bcef0
user: fangly
date: 2011-11-30 02:38:52
summary: Avoid trailing whitespace
affected #: 1 file
diff -r 2eb6dddb3866adef30b72e92e747d9ece4e11da9 -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -597,8 +597,10 @@
del tmp
if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
read1_id = read1_id[:-2]
-
- identifier = read1_id + ' ' + read1_desc
+
+ identifier = read1_id
+ if read1_desc:
+ identifier = identifier + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
https://bitbucket.org/galaxy/galaxy-central/commits/7d4a431f7188/
changeset: 7d4a431f7188
user: fangly
date: 2011-11-30 03:01:07
summary: Updated tests for FASTQ interlacer/deinterlacer tool
affected #: 2 files
diff -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c -r 7d4a431f7188d71d5e0ba2655a10145ecbdb4468 test-data/paired_end_2.fastqsanger
--- a/test-data/paired_end_2.fastqsanger
+++ b/test-data/paired_end_2.fastqsanger
@@ -1,6 +1,6 @@
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/2
CTCGCACGGCCGCCTCGACCACTTGGTCTGGCGTCATGCGCAATTTTTTCTCCATGTGGAACGGGCTGGTGGCGATGAACGTATGAATATGCCCCCGCGCT
diff -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c -r 7d4a431f7188d71d5e0ba2655a10145ecbdb4468 test-data/paired_end_merged.fastqsanger
--- a/test-data/paired_end_merged.fastqsanger
+++ b/test-data/paired_end_merged.fastqsanger
@@ -2,9 +2,9 @@
NACATCAACACTCAGTAACGGCTGGCGCAAAATGGCATTGATTAACGAAGACTTCCCGCGCGTGAAGGCGCCGGCAAACGAGGCTCGGGAAGGGGCTCCCG
+1539:931/1
BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/1
NCGGAGACTTCGAGGCCATCCAGTCGATTGCCAAAGTCATCAAGGGGTCGACGATCTGCTCCCTTGCCCGTTCCAACGAGAATGAAATCCGCCGCGCGTGG
https://bitbucket.org/galaxy/galaxy-central/commits/7848d6fd1b7a/
changeset: 7848d6fd1b7a
user: jgoecks
date: 2013-01-17 20:12:57
summary: Merged in fangly/galaxy-central (pull request #8: Paired-end code mishandles description of FASTQ headers)
affected #: 3 files
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -514,9 +514,13 @@
self.apply_galaxy_conventions = apply_galaxy_conventions
def close( self ):
return self.file.close()
- def get( self, sequence_id ):
- if not isinstance( sequence_id, basestring ):
- sequence_id = sequence_id.identifier
+ def get( self, sequence_identifier ):
+ # Input is either a sequence ID or a sequence object
+ if not isinstance( sequence_identifier, basestring ):
+ # Input was a sequence object (not a sequence ID). Get the sequence ID
+ sequence_identifier = sequence_identifier.identifier
+ # Get only the ID part of the sequence header
+ sequence_id, sequence_sep, sequence_desc = sequence_identifier.partition(' ')
rval = None
if sequence_id in self.offset_dict:
initial_offset = self.file.tell()
@@ -525,7 +529,7 @@
del self.offset_dict[ sequence_id ]
self.file.seek( seq_offset )
rval = self.reader.next()
- #assert rval.identifier == sequence_id, 'seq id mismatch' #should be able to remove this
+ #assert rval.id == sequence_id, 'seq id mismatch' #should be able to remove this
self.file.seek( initial_offset )
else:
while True:
@@ -535,13 +539,14 @@
except StopIteration:
self.eof = True
break #eof, id not found, will return None
- if fastq_read.identifier == sequence_id:
+ fastq_read_id, fastq_read_sep, fastq_read_desc = fastq_read.identifier.partition(' ')
+ if fastq_read_id == sequence_id:
rval = fastq_read
break
else:
- if fastq_read.identifier not in self.offset_dict:
- self.offset_dict[ fastq_read.identifier ] = []
- self.offset_dict[ fastq_read.identifier ].append( offset )
+ if fastq_read_id not in self.offset_dict:
+ self.offset_dict[ fastq_read_id ] = []
+ self.offset_dict[ fastq_read_id ].append( offset )
if rval is not None and self.apply_galaxy_conventions:
rval.apply_galaxy_conventions()
return rval
@@ -582,16 +587,20 @@
self.format = format
self.force_quality_encoding = force_quality_encoding
def join( self, read1, read2 ):
- if read1.identifier.endswith( '/2' ) and read2.identifier.endswith( '/1' ):
+ read1_id, read1_sep, read1_desc = read1.identifier.partition(' ')
+ read2_id, read2_sep, read2_desc = read2.identifier.partition(' ')
+ if read1_id.endswith( '/2' ) and read2_id.endswith( '/1' ):
#swap 1 and 2
tmp = read1
read1 = read2
read2 = tmp
del tmp
- if read1.identifier.endswith( '/1' ) and read2.identifier.endswith( '/2' ):
- identifier = read1.identifier[:-2]
- else:
- identifier = read1.identifier
+ if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
+ read1_id = read1_id[:-2]
+
+ identifier = read1_id
+ if read1_desc:
+ identifier = identifier + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
@@ -621,17 +630,18 @@
rval.quality = "%s %s" % ( new_read1.quality.strip(), new_read2.quality.strip() )
return rval
def get_paired_identifier( self, fastq_read ):
- identifier = fastq_read.identifier
- if identifier[-2] == '/':
- if identifier[-1] == "1":
- identifier = "%s2" % identifier[:-1]
- elif identifier[-1] == "2":
- identifier = "%s1" % identifier[:-1]
- return identifier
+ read_id, read_sep, read_desc = fastq_read.identifier.partition(' ')
+ if read_id[-2] == '/':
+ if read_id[-1] == "1":
+ read_id = "%s2" % read_id[:-1]
+ elif read_id[-1] == "2":
+ read_id = "%s1" % read_id[:-1]
+ return read_id
def is_first_mate( self, sequence_id ):
is_first = None
if not isinstance( sequence_id, basestring ):
sequence_id = sequence_id.identifier
+ sequence_id, sequence_sep, sequence_desc = sequence_id.partition(' ')
if sequence_id[-2] == '/':
if sequence_id[-1] == "1":
is_first = True
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 test-data/paired_end_2.fastqsanger
--- a/test-data/paired_end_2.fastqsanger
+++ b/test-data/paired_end_2.fastqsanger
@@ -1,6 +1,6 @@
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/2
CTCGCACGGCCGCCTCGACCACTTGGTCTGGCGTCATGCGCAATTTTTTCTCCATGTGGAACGGGCTGGTGGCGATGAACGTATGAATATGCCCCCGCGCT
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 test-data/paired_end_merged.fastqsanger
--- a/test-data/paired_end_merged.fastqsanger
+++ b/test-data/paired_end_merged.fastqsanger
@@ -2,9 +2,9 @@
NACATCAACACTCAGTAACGGCTGGCGCAAAATGGCATTGATTAACGAAGACTTCCCGCGCGTGAAGGCGCCGGCAAACGAGGCTCGGGAAGGGGCTCCCG
+1539:931/1
BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/1
NCGGAGACTTCGAGGCCATCCAGTCGATTGCCAAAGTCATCAAGGGGTCGACGATCTGCTCCCTTGCCCGTTCCAACGAGAATGAAATCCGCCGCGCGTGG
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this notification because you have the service enabled and are the
addressed recipient of this email.
1
0
commit/galaxy-central: jgoecks: Comment and naming fix for migration script 108.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1b95e5b076fe/
changeset: 1b95e5b076fe
user: jgoecks
date: 2013-01-17 19:56:26
summary: Comment and naming fix for migration script 108.
affected #: 1 file
diff -r 1314572f86e121aa734ea27a52601fbcdb450278 -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -71,10 +71,10 @@
except Exception, e:
log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
- # Drop the Job table's exit_code column.
+ # Drop the LDDA table's extended metadata ID column.
try:
- job_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
- extended_metadata_id = job_table.c.extended_metadata_id
+ ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+ extended_metadata_id = ldda_table.c.extended_metadata_id
extended_metadata_id.drop()
except Exception, e:
log.debug( "Dropping 'extended_metadata_id' column from library_dataset_dataset_association table failed: %s" % ( str( e ) ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ccc640f65971/
changeset: ccc640f65971
user: kellrott
date: 2013-01-15 22:57:14
summary: Adding exception catches to database 108 downgrade step to fix failure issue with downgrading postgres database (foreign key rules violated...)
affected #: 1 file
diff -r 4bd419751ed3e8cc54913fa37389111a0e7faaa9 -r ccc640f65971632ea6de7a94bc01d24954102487 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -61,8 +61,15 @@
def downgrade():
metadata.reflect()
- ExtendedMetadata_table.drop()
- ExtendedMetadataIndex_table.drop()
+ try:
+ ExtendedMetadataIndex_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) )
+
+ try:
+ ExtendedMetadata_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
# Drop the Job table's exit_code column.
try:
https://bitbucket.org/galaxy/galaxy-central/commits/1314572f86e1/
changeset: 1314572f86e1
user: jgoecks
date: 2013-01-17 19:52:01
summary: Merged in kellrott/galaxy-central (pull request #109: Fixing database v108 downgrade failure)
affected #: 1 file
diff -r b14f68fb3e853313d944f934cf279b3517d4a7e6 -r 1314572f86e121aa734ea27a52601fbcdb450278 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -61,8 +61,15 @@
def downgrade():
metadata.reflect()
- ExtendedMetadata_table.drop()
- ExtendedMetadataIndex_table.drop()
+ try:
+ ExtendedMetadataIndex_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) )
+
+ try:
+ ExtendedMetadata_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
# Drop the Job table's exit_code column.
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Use correct index tables for setting dbkey of Tophat2 outputs.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b14f68fb3e85/
changeset: b14f68fb3e85
user: jgoecks
date: 2013-01-17 18:56:03
summary: Use correct index tables for setting dbkey of Tophat2 outputs.
affected #: 1 file
diff -r f11abc888753d0b8efeb4cae653a21ca64a2ae5b -r b14f68fb3e853313d944f934cf279b3517d4a7e6 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -323,7 +323,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -342,7 +342,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -361,7 +361,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -380,7 +380,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Readd a request param that is no longer needed to a request between Galaxy and the tool shed to ensure backward compatibility.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f11abc888753/
changeset: f11abc888753
user: greg
date: 2013-01-17 16:40:01
summary: Readd a request param that is no longer needed to a request between Galaxy and the tool shed to ensure backward compatibility.
affected #: 1 file
diff -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 -r f11abc888753d0b8efeb4cae653a21ca64a2ae5b lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -711,7 +711,8 @@
tool_dependencies_dict = {}
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s' % ( tool_shed_url, repository_name, changeset_revision )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
+ ( tool_shed_url, repository_name, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Test for reinstalling an uninstalled repository that now has a new changeset adding repository dependencies.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e1e7bee1e6f/
changeset: 6e1e7bee1e6f
user: inithello
date: 2013-01-17 16:18:36
summary: Test for reinstalling an uninstalled repository that now has a new changeset adding repository dependencies.
affected #: 2 files
diff -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -711,6 +711,8 @@
url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
+ # Build the url that will simulate a filled-out form being submitted. Due to a limitation in twill, the reselect_tool_panel_section
+ # form doesn't get parsed correctly.
repo_dependencies = self.create_checkbox_query_string( field_name='install_repository_dependencies', value=install_repository_dependencies )
tool_dependencies = self.create_checkbox_query_string( field_name='install_tool_dependencies', value=install_tool_dependencies )
encoded_repository_id = self.security.encode_id( installed_repository.id )
diff -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -0,0 +1,105 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_1087'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_1087'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 1087 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+log = logging.getLogger( __name__ )
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+ '''Test installing a repository, then updating it to include repository dependencies.'''
+ def test_0000_create_or_login_admin_user( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_and_populate_column_repository( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ def test_0010_create_and_populate_convert_repository( self ):
+ '''Create and populate the convert_chars repository.'''
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ def test_0015_install_and_uninstall_column_repository( self ):
+ '''Install and uninstall the column_maker repository.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( column_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ new_tool_panel_section='column_maker',
+ strings_not_displayed=[ 'install_repository_dependencies' ] )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ def test_0020_upload_dependency_xml( self ):
+ '''Upload a repository_dependencies.xml file to column_maker that specifies convert_chars.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'column' ] )
+ self.create_repository_dependency( column_repository, depends_on=[ convert_repository ], filepath=repository_dependencies_path )
+ def test_0025_verify_repository_dependency( self ):
+ '''Verify that the new revision of column_maker now depends on convert_chars.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( column_repository, convert_repository )
+ log.debug( [ repository.id for repository in test_db_util.get_all_installed_repositories() ] )
+ def test_0030_reinstall_column_repository( self ):
+ '''Reinstall column_maker and verify that it now shows repository dependencies.'''
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ strings_displayed=[ 'Handle repository dependencies', convert_repository.name, self.get_repository_tip( convert_repository ) ]
+ # Due to twill's limitations, only check for strings on the (redirected) reselect tool panel section page, don't actually reinstall.
+ url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_column_repository.id )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings_displayed )
+ uninstalled_repositories = [ ( column_repository_name, common.test_user_1_name ) ]
+ self.verify_installed_uninstalled_repositories( uninstalled_repositories=uninstalled_repositories, installed_repositories=[] )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0