galaxy-commits
December 2012
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b96fe1d76b38/
changeset: b96fe1d76b38
user: greg
date: 2012-12-13 17:41:31
summary: Utility code cleanup and import tweaking.
affected #: 19 files
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -3,7 +3,7 @@
"""
import os
import galaxy.util.shed_util
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/common_util.py
--- a/lib/galaxy/tool_shed/common_util.py
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -1,7 +1,7 @@
import os, urllib2
from galaxy import util
from galaxy.util.odict import odict
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_decode
REPOSITORY_OWNER = 'devteam'
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/encoding_util.py
--- a/lib/galaxy/tool_shed/encoding_util.py
+++ b/lib/galaxy/tool_shed/encoding_util.py
@@ -1,5 +1,5 @@
-import binascii
-from galaxy.util.hash_util import *
+import binascii, logging
+from galaxy.util.hash_util import hmac_new
from galaxy.util.json import json_fix
from galaxy import eggs
@@ -8,6 +8,8 @@
pkg_resources.require( "simplejson" )
import simplejson
+log = logging.getLogger( __name__ )
+
encoding_sep = '__esep__'
def tool_shed_decode( value ):
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -5,7 +5,6 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
-from common import *
from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,5 +1,5 @@
import os, shutil, tarfile, urllib2, zipfile
-from galaxy.datatypes.checkers import *
+from galaxy.datatypes import checkers
def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
env_var_name = elem.get( 'name', 'PATH' )
@@ -73,13 +73,13 @@
zip_archive.close()
return True
def isbz2( file_path ):
- return is_bz2( file_path )
+ return checkers.is_bz2( file_path )
def isgzip( file_path ):
- return is_gzip( file_path )
+ return checkers.is_gzip( file_path )
def istar( file_path ):
return tarfile.is_tarfile( file_path )
def iszip( file_path ):
- return check_zip( file_path )
+ return checkers.check_zip( file_path )
def make_directory( full_path ):
if not os.path.exists( full_path ):
os.makedirs( full_path )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -1,8 +1,8 @@
import sys, os, subprocess, tempfile
-from common_util import *
-from fabric_util import *
-from galaxy.tool_shed.encoding_util import *
-from galaxy.model.orm import *
+import common_util
+import fabric_util
+from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
@@ -166,7 +166,7 @@
env_var_dicts = []
for env_elem in action_elem:
if env_elem.tag == 'environment_variable':
- env_var_dict = create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
+ env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
env_var_dicts.append( env_var_dict )
if env_var_dicts:
@@ -185,7 +185,7 @@
else:
try:
# There is currently only one fabric method.
- install_and_build_package( app, tool_dependency, actions_dict )
+ fabric_util.install_and_build_package( app, tool_dependency, actions_dict )
except Exception, e:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
tool_dependency.error_message = str( e )
@@ -273,7 +273,7 @@
name=env_var_name,
version=None )
tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository )
- env_var_dict = create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
+ env_var_dict = common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
@@ -284,10 +284,10 @@
type='set_environment',
status=app.model.ToolDependency.installation_status.INSTALLING,
set_status=True )
- cmd = create_or_update_env_shell_file( install_dir, env_var_dict )
+ cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
if env_var_version == '1.0':
# Handle setting environment variables using a fabric method.
- handle_command( app, tool_dependency, install_dir, cmd )
+ fabric_util.handle_command( app, tool_dependency, install_dir, cmd )
sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -1,12 +1,12 @@
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import Admin
from galaxy.webapps.community import model
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
import galaxy.util.shed_util_common as suc
-from common import *
+import common
from repository import RepositoryGrid, CategoryGrid
from galaxy import eggs
@@ -474,7 +474,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = get_user( trans, kwd[ 'user_id' ] )
+ user = common.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -489,7 +489,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if kwd[ 'id' ]:
@@ -533,7 +533,7 @@
# The received id is a RepositoryMetadata object id, so we need to get the
# associated Repository and redirect to view_or_manage_repository with the
# changeset_revision.
- repository_metadata = get_repository_metadata_by_id( trans, kwd[ 'id' ] )
+ repository_metadata = common.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
repository = repository_metadata.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
@@ -554,7 +554,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif get_category_by_name( trans, name ):
+ elif common.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -615,7 +615,7 @@
ids = util.listify( id )
count = 0
for repository_metadata_id in ids:
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
count += 1
@@ -641,7 +641,7 @@
action='manage_categories',
message=message,
status='error' ) )
- category = get_category( trans, id )
+ category = common.get_category( trans, id )
if params.get( 'edit_category_button', False ):
new_name = util.restore_text( params.get( 'name', '' ) ).strip()
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
@@ -649,7 +649,7 @@
if not new_name:
message = 'Enter a valid name'
status = 'error'
- elif category.name != new_name and get_category_by_name( trans, name ):
+ elif category.name != new_name and common.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -772,7 +772,7 @@
ids = util.listify( id )
message = "Deleted %d categories: " % len( ids )
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
category.deleted = True
trans.sa_session.add( category )
trans.sa_session.flush()
@@ -800,7 +800,7 @@
purged_categories = ""
message = "Purged %d categories: " % len( ids )
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
if category.deleted:
# Delete RepositoryCategoryAssociations
for rca in category.repositories:
@@ -827,7 +827,7 @@
count = 0
undeleted_categories = ""
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
if category.deleted:
category.deleted = False
trans.sa_session.add( category )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,16 +1,13 @@
import os, string, socket, logging, simplejson, binascii, tempfile, filecmp
-from time import strftime
+from time import gmtime, strftime
from datetime import *
-from galaxy.datatypes.checkers import *
from galaxy.tools import *
from galaxy.util.odict import odict
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.hash_util import *
import galaxy.util.shed_util_common as suc
-from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy.model.item_attrs import UsesItemRatings
from galaxy import eggs
@@ -73,9 +70,6 @@
'${host}'
"""
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-
malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
@@ -146,7 +140,6 @@
return False
def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
"""Determine if the current changeset revision has been reviewed by the current user."""
- changeset_revision_reviewed_by_user = False
for review in repository.reviews:
if review.changeset_revision == changeset_revision and review.user == user:
return True
@@ -162,34 +155,6 @@
if user_email in admin_users:
return True
return False
-def copy_file_from_disk( filename, repo_dir, dir ):
- file_path = None
- found = False
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == filename:
- file_path = os.path.abspath( os.path.join( root, name ) )
- found = True
- break
- if found:
- break
- if file_path:
- tmp_filename = os.path.join( dir, filename )
- shutil.copy( file_path, tmp_filename )
- else:
- tmp_filename = None
- return tmp_filename
-def generate_tool_guid( trans, repository, tool ):
- """
- Generate a guid for the received tool. The form of the guid is
- <tool shed host>/repos/<tool shed username>/<tool shed repo name>/<tool id>/<tool version>
- """
- return '%s/repos/%s/%s/%s/%s' % ( trans.request.host,
- repository.user.username,
- repository.name,
- tool.id,
- tool.version )
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
stripped_file_name = suc.strip_path( file_name )
file_path = None
@@ -287,10 +252,6 @@
def get_repository_metadata_by_id( trans, id ):
"""Get repository metadata from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
-def get_repository_metadata_by_repository_id( trans, id ):
- """Get all metadata records for a specified repository."""
- return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) )
def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
repository_metadata_revisions = []
metadata_changeset_revision_hashes = []
@@ -402,7 +363,7 @@
tip_changeset = repo.changelog.tip()
ctx = repo.changectx( tip_changeset )
t, tz = ctx.date()
- date = datetime( *time.gmtime( float( t ) - tz )[:6] )
+ date = datetime( *gmtime( float( t ) - tz )[:6] )
display_date = date.strftime( "%Y-%m-%d" )
try:
username = ctx.user().split()[0]
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1,5 +1,5 @@
import os, logging, tempfile, shutil, ConfigParser
-from time import strftime
+from time import gmtime, strftime
from datetime import date, datetime
from galaxy import util
from galaxy.web.base.controller import *
@@ -8,10 +8,10 @@
from galaxy.webapps.community.model import directory_hash_id
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
-from common import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode
+import common
from galaxy import eggs
eggs.require('mercurial')
@@ -511,7 +511,7 @@
link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
attach_popup=False )
-class RepositoryController( BaseUIController, ItemRatings ):
+class RepositoryController( BaseUIController, common.ItemRatings ):
install_matched_repository_grid = InstallMatchedRepositoryGrid()
matched_repository_grid = MatchedRepositoryGrid()
@@ -535,7 +535,7 @@
# The value of 'id' has been set to the search string, which is a repository name. We'll try to get the desired encoded repository
# id to pass on.
try:
- repository = get_repository_by_name( trans, kwd[ 'id' ] )
+ repository = common.get_repository_by_name( trans, kwd[ 'id' ] )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -610,7 +610,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = get_user( trans, kwd[ 'user_id' ] )
+ user = common.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -650,7 +650,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if trans.user:
@@ -691,7 +691,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
suc.update_repository( repo )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repository=repository,
@@ -716,7 +716,8 @@
# The value of 'id' has been set to the search string, which is a repository name.
# We'll try to get the desired encoded repository id to pass on.
try:
- repository = get_repository_by_name( trans, kwd[ 'id' ] )
+ name = kwd[ 'id' ]
+ repository = common.get_repository_by_name( trans, name )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -739,7 +740,7 @@
if 'f-Category.name' in kwd:
# The user browsed to a category and then entered a search string, so get the category associated with it's value.
category_name = kwd[ 'f-Category.name' ]
- category = get_category_by_name( trans, category_name )
+ category = common.get_category_by_name( trans, category_name )
# Set the id value in kwd since it is required by the ValidRepositoryGrid.build_initial_query method.
kwd[ 'id' ] = trans.security.encode_id( category.id )
if galaxy_url:
@@ -749,7 +750,7 @@
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ repository_metadata = common.get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -761,7 +762,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
# The changeset_revision_select_field in the ValidRepositoryGrid performs a refresh_on_change which sends in request parameters like
# changeset_revison_1, changeset_revision_2, etc. One of the many select fields on the grid performed the refresh_on_change, so we loop
@@ -921,7 +922,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
if not categories:
message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
'An administrator needs to create some via the Administrator control panel before creating repositories.',
@@ -1018,11 +1019,11 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository, tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
if message:
status = 'error'
tool_state = self.__new_state( trans )
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, repository_id, changeset_revision )
try:
return trans.fill_template( "/webapps/community/repository/tool_form.mako",
@@ -1085,7 +1086,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1102,7 +1103,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd[ 'repository_ids' ] = encoded_repository_ids
@@ -1170,7 +1171,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1187,7 +1188,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd = {}
@@ -1581,9 +1582,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
- repository, tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, error_message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
invalid_file_tups = suc.check_tool_input_params( trans.app,
@@ -1781,7 +1782,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = get_revision_label( trans, repository, repository.tip( trans.app ) )
+ revision_label = common.get_revision_label( trans, repository, repository.tip( trans.app ) )
repository_metadata = None
repository_metadata_id = None
metadata = None
@@ -1790,7 +1791,7 @@
if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1800,7 +1801,7 @@
if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
if repository_metadata:
- revision_label = get_revision_label( trans, repository, previous_changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, previous_changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1815,20 +1816,17 @@
handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += malicious_error_can_push
+ message += common.malicious_error_can_push
else:
- message += malicious_error
+ message += common.malicious_error
status = 'error'
malicious_check_box = CheckboxField( 'malicious', checked=is_malicious )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
selected_categories = [ rca.category_id for rca in repository.categories ]
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -1929,7 +1927,7 @@
repository_metadata_id = None
metadata = None
repository_dependencies = None
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
@@ -2001,7 +1999,7 @@
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
repository=repository,
@@ -2161,7 +2159,7 @@
if not commit_message:
commit_message = 'Deleted selected files'
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
- handle_email_alerts( trans, repository )
+ common.handle_email_alerts( trans, repository )
# Update the repository files for browsing.
suc.update_repository( repo )
# Get the new repository tip.
@@ -2173,11 +2171,11 @@
else:
message += 'The selected files were deleted from the repository. '
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
+ common.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -2200,7 +2198,7 @@
# Get the name of the server hosting the tool shed instance.
host = trans.request.host
# Build the email message
- body = string.Template( contact_owner_template ) \
+ body = string.Template( common.contact_owner_template ) \
.safe_substitute( username=trans.user.username,
repository_name=repository.name,
email=trans.user.email,
@@ -2319,7 +2317,7 @@
else:
has_metadata = False
t, tz = ctx.date()
- date = datetime( *time.gmtime( float( t ) - tz )[:6] )
+ date = datetime( *gmtime( float( t ) - tz )[:6] )
display_date = date.strftime( "%Y-%m-%d" )
change_dict = { 'ctx' : ctx,
'rev' : str( ctx.rev() ),
@@ -2332,7 +2330,7 @@
'has_metadata' : has_metadata }
# Make sure we'll view latest changeset first.
changesets.insert( 0, change_dict )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/view_changelog.mako',
repository=repository,
@@ -2363,7 +2361,7 @@
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
diffs.append( suc.to_safe_string( diff, to_html=True ) )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
repository=repository,
@@ -2433,7 +2431,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -2449,20 +2447,17 @@
else:
repository_metadata_id = None
metadata = None
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += malicious_error_can_push
+ message += common.malicious_error_can_push
else:
- message += malicious_error
+ message += common.malicious_error
status = 'error'
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2499,7 +2494,7 @@
tool = None
guid = None
original_tool_data_path = trans.app.config.tool_data_path
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
@@ -2512,7 +2507,7 @@
guid = tool_metadata_dict[ 'guid' ]
full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
- can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
+ can_use_disk_file = common.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
@@ -2534,19 +2529,16 @@
tool_lineage = self.get_versions_of_tool( trans, repository, repository_metadata, guid )
else:
metadata = None
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
trans.app.config.tool_data_path = original_tool_data_path
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2598,7 +2590,7 @@
# Restrict the options to all revisions that have associated metadata.
repository_metadata_revisions = repository.metadata_revisions
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
+ rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
changeset_tups.append( ( rev, label, changeset_revision ) )
refresh_on_change_values.append( changeset_revision )
# Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -6,7 +6,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.model.orm import and_
from sqlalchemy.sql.expression import func
-from common import *
+import common
from galaxy.webapps.community.util.container_util import STRSEP
from repository import RepositoryGrid
import galaxy.util.shed_util_common as suc
@@ -59,7 +59,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for review in repository.reviews:
changeset_revision = review.changeset_revision
- rev, label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -67,13 +67,13 @@
class WithoutReviewsRevisionColumn( grids.GridColumn ):
def get_value( self, trans, grid, repository ):
# Restrict the options to revisions that have not yet been reviewed.
- repository_metadata_revisions = get_repository_metadata_revisions_for_review( repository, reviewed=False )
+ repository_metadata_revisions = common.get_repository_metadata_revisions_for_review( repository, reviewed=False )
if repository_metadata_revisions:
rval = ''
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = get_rev_label_changeset_revision_from_repository_metadata( trans,
- repository_metadata,
- repository=repository )
+ rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans,
+ repository_metadata,
+ repository=repository )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -177,7 +177,7 @@
rval += 'edit_review'
else:
rval +='browse_review'
- rval += '?id=%s">%s</a>' % ( encoded_review_id, get_revision_label( trans, review.repository, review.changeset_revision ) )
+ rval += '?id=%s">%s</a>' % ( encoded_review_id, common.get_revision_label( trans, review.repository, review.changeset_revision ) )
return rval
class RatingColumn( grids.TextColumn ):
def get_value( self, trans, grid, review ):
@@ -260,7 +260,7 @@
.outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
.outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
-class RepositoryReviewController( BaseUIController, ItemRatings ):
+class RepositoryReviewController( BaseUIController, common.ItemRatings ):
component_grid = ComponentGrid()
repositories_reviewed_by_me_grid = RepositoriesReviewedByMeGrid()
@@ -277,7 +277,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
encoded_review_id = kwd[ 'id' ]
- review = get_review( trans, encoded_review_id )
+ review = common.get_review( trans, encoded_review_id )
if kwd.get( 'approve_repository_review_button', False ):
approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
approved_select_field_value = str( kwd[ approved_select_field_name ] )
@@ -309,10 +309,10 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- review = get_review( trans, kwd[ 'id' ] )
+ review = common.get_review( trans, kwd[ 'id' ] )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/browse_review.mako',
repository=repository,
changeset_revision_label=changeset_revision_label,
@@ -345,7 +345,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif get_component_by_name( trans, name ):
+ elif common.get_component_by_name( trans, name ):
message = 'A component with that name already exists'
status = 'error'
else:
@@ -377,16 +377,16 @@
if repository_id:
if changeset_revision:
# Make sure there is not already a review of the revision by the user.
- if get_review_by_repository_id_changeset_revision_user_id( trans,
- repository_id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) ):
+ if common.get_review_by_repository_id_changeset_revision_user_id( trans,
+ repository_id,
+ changeset_revision,
+ trans.security.encode_id( trans.user.id ) ):
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
- if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ):
+ if not create_without_copying and not previous_review_id and common.has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
action='select_previous_review',
**kwd ) )
@@ -404,7 +404,7 @@
trans.sa_session.add( review )
trans.sa_session.flush()
if previous_review_id:
- review_to_copy = get_review( trans, previous_review_id )
+ review_to_copy = common.get_review( trans, previous_review_id )
self.copy_review( trans, review_to_copy, review )
review_id = trans.security.encode_id( review.id )
message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \
@@ -440,7 +440,7 @@
action='manage_categories',
message=message,
status='error' ) )
- component = get_component( trans, id )
+ component = common.get_component( trans, id )
if params.get( 'edit_component_button', False ):
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
if component.description != new_description:
@@ -465,9 +465,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
review_id = kwd.get( 'id', None )
- review = get_review( trans, review_id )
+ review = common.get_review( trans, review_id )
components_dict = odict()
- for component in get_components( trans ):
+ for component in common.get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
@@ -517,8 +517,8 @@
approved = str( v )
elif component_review_attr == 'rating':
rating = int( str( v ) )
- component = get_component( trans, component_id )
- component_review = get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
+ component = common.get_component( trans, component_id )
+ component_review = common.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
if component_review:
# See if the existing component review should be updated.
if component_review.comment != comment or \
@@ -572,7 +572,7 @@
name='revision_approved',
selected_value=selected_value,
for_component=False )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/edit_review.mako',
repository=repository,
review=review,
@@ -659,14 +659,14 @@
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
reviews_dict = odict()
- for changeset in get_reversed_changelog_changesets( repo ):
+ for changeset in common.get_reversed_changelog_changesets( repo ):
ctx = repo.changectx( changeset )
changeset_revision = str( ctx )
if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes:
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
if changeset_revision in reviewed_revision_hashes:
# Find the review for this changeset_revision
- repository_reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
@@ -704,8 +704,8 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
- reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/reviews_of_changeset_revision.mako',
repository=repository,
changeset_revision=changeset_revision,
@@ -724,7 +724,7 @@
if 'operation' in kwd:
operation = kwd['operation'].lower()
# The value of the received id is the encoded review id.
- review = get_review( trans, kwd[ 'id' ] )
+ review = common.get_review( trans, kwd[ 'id' ] )
repository = review.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
if operation == "inspect repository revisions":
@@ -737,7 +737,7 @@
action='view_or_manage_repository',
**kwd ) )
# The user may not be the current user. The value of the received id is the encoded user id.
- user = get_user( trans, kwd[ 'id' ] )
+ user = common.get_user( trans, kwd[ 'id' ] )
self.repository_reviews_by_user_grid.title = "All repository revision reviews for user '%s'" % user.username
return self.repository_reviews_by_user_grid( trans, **kwd )
@web.expose
@@ -768,8 +768,8 @@
repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ previous_reviews_dict = common.get_previous_repository_reviews( trans, repository, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako',
repository=repository,
changeset_revision=changeset_revision,
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -1,8 +1,7 @@
import sys, os, shutil, logging, tarfile, tempfile, urllib
from galaxy.web.base.controller import *
-from galaxy.model.orm import *
-from galaxy.datatypes.checkers import *
-from common import *
+from galaxy.datatypes import checkers
+import common
import galaxy.util.shed_util_common as suc
from galaxy import eggs
@@ -13,8 +12,6 @@
undesirable_dirs = [ '.hg', '.svn', '.git', '.cvs' ]
undesirable_files = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
CHUNK_SIZE = 2**20 # 1Mb
class UploadController( BaseUIController ):
@@ -26,7 +23,7 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
category_ids = util.listify( params.get( 'category_id', '' ) )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
repository_id = params.get( 'repository_id', '' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
@@ -40,7 +37,7 @@
url = params.get( 'url', '' )
# Part of the upload process is sending email notification to those that have registered to
# receive them. One scenario occurs when the first change set is produced for the repository.
- # See the handle_email_alerts() method for the definition of the scenarios.
+ # See the common.handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new( trans.app )
uploaded_directory = None
if params.get( 'upload_button', False ):
@@ -85,9 +82,9 @@
isbz2 = False
if uploaded_file:
if uncompress_file:
- isgzip = is_gzip( uploaded_file_name )
+ isgzip = checkers.is_gzip( uploaded_file_name )
if not isgzip:
- isbz2 = is_bz2( uploaded_file_name )
+ isbz2 = checkers.is_bz2( uploaded_file_name )
if isempty:
tar = None
istar = False
@@ -134,7 +131,7 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
+ check_contents = common.check_file_contents( trans )
if check_contents and os.path.isfile( full_path ):
content_alert_str = self.__check_file_content( full_path )
else:
@@ -151,7 +148,7 @@
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
suc.update_repository( repo )
@@ -180,7 +177,7 @@
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
+ common.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
# Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
if repository.metadata_revisions:
@@ -330,7 +327,7 @@
pass
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
+ check_contents = common.check_file_contents( trans )
for filename_in_archive in filenames_in_archive:
# Check file content to ensure it is appropriate.
if check_contents and os.path.isfile( filename_in_archive ):
@@ -344,7 +341,7 @@
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
@@ -423,8 +420,8 @@
return True, ''
def __check_file_content( self, file_path ):
message = ''
- if check_html( file_path ):
+ if checkers.check_html( file_path ):
message = 'The file "%s" contains HTML content.\n' % str( file_path )
- elif check_image( file_path ):
+ elif checkers.check_image( file_path ):
message = 'The file "%s" contains image content.\n' % str( file_path )
return message
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -6,12 +6,12 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
+from galaxy.web.base.controller import *
from galaxy.tools import DefaultToolState
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
-from galaxy.model.orm import *
-from common import *
+import common
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
class RepoInputDataModule( InputDataModule ):
@@ -49,7 +49,7 @@
self.errors = None
for tool_dict in tools_metadata:
if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
- repository, self.tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+ repository, self.tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
if message and self.tool is None:
self.errors = 'unavailable'
break
@@ -144,7 +144,7 @@
workflow_name = tool_shed_decode( workflow_name )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
repository=repository,
@@ -156,7 +156,7 @@
status=status )
@web.expose
def generate_workflow_image( self, trans, repository_metadata_id, workflow_name ):
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
metadata = repository_metadata.metadata
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -7,7 +7,7 @@
import os.path, os, errno, sys, codecs, operator, logging, tarfile, mimetypes, ConfigParser
from galaxy import util
from galaxy.util.bunch import Bunch
-from galaxy.util.hash_util import *
+from galaxy.util.hash_util import new_secure_hash
from galaxy.web.form_builder import *
from galaxy import eggs
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -13,8 +13,8 @@
from galaxy.model.orm.ext.assignmapper import *
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
-from galaxy.webapps.community.util.shed_statistics import *
-from galaxy.webapps.community.util.hgweb_config import *
+import galaxy.webapps.community.util.shed_statistics as shed_statistics
+import galaxy.webapps.community.util.hgweb_config
from galaxy.webapps.community.security import CommunityRBACAgent
metadata = MetaData()
@@ -318,6 +318,6 @@
result.create_tables = create_tables
# Load local tool shed security policy
result.security_agent = CommunityRBACAgent( result )
- result.shed_counter = ShedCounter( result )
- result.hgweb_config_manager = HgWebConfigManager()
+ result.shed_counter = shed_statistics.ShedCounter( result )
+ result.hgweb_config_manager = galaxy.webapps.community.util.hgweb_config.HgWebConfigManager()
return result
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/security/__init__.py
--- a/lib/galaxy/webapps/community/security/__init__.py
+++ b/lib/galaxy/webapps/community/security/__init__.py
@@ -5,7 +5,7 @@
from datetime import datetime, timedelta
from galaxy.util.bunch import Bunch
from galaxy.util import listify
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
log = logging.getLogger(__name__)
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -10,7 +10,7 @@
from galaxy.web.params import QuotaParamParser
from galaxy.exceptions import *
from galaxy.util.odict import *
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_decode
import galaxy.datatypes.registry
import logging, imp, subprocess, urllib2
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -3,7 +3,7 @@
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy import eggs, tools
eggs.require( 'mercurial' )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -14,7 +14,7 @@
from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import topsort, topsort_levels, CycleError
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy.workflow.modules import *
from galaxy import model
from galaxy import util
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d scripts/migrate_tools/migrate_tools.py
--- a/scripts/migrate_tools/migrate_tools.py
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -16,7 +16,7 @@
sys.path = new_path
from galaxy import eggs
-from galaxy.tool_shed.migrate.common import *
+from galaxy.tool_shed.migrate.common import MigrateToolsApplication
app = MigrateToolsApplication( sys.argv[ 1 ] )
non_shed_tool_confs = app.install_manager.proprietary_tool_confs
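Both commits in this thread apply the same cleanup: replace "from module import *" with either explicit names or a qualified module alias. A minimal, runnable sketch of the pattern, using os.path in place of the Galaxy modules so it is self-contained:

# Before (wildcard): "from os.path import *" would dump dozens of names
# into this namespace and hide where each helper comes from.
# After: import only what is used, or keep the module qualified.
from os.path import join, basename   # explicit names
import os.path as ospath             # qualified alias, in the spirit of
                                     # "import galaxy.util.shed_util as shed_util"

def repo_file( install_dir, name ):
    # The two call styles are equivalent; the qualified form makes the
    # origin of every helper obvious at the call site.
    explicit = join( install_dir, name )
    qualified = ospath.join( install_dir, name )
    assert explicit == qualified
    return basename( explicit )

assert repo_file( '../shed_tools', 'fastqc' ) == 'fastqc'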
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c0c6cf9f5d6e/
changeset: c0c6cf9f5d6e
user: greg
date: 2012-12-13 16:14:01
summary: Utility refactoring cleanup and import tweaks.
affected #: 6 files
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -5,7 +5,7 @@
import urllib2, tempfile
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
@@ -36,9 +36,9 @@
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
- self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
+ self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
- index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
@@ -172,7 +172,7 @@
# See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
if is_displayed:
- tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
+ tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
else:
@@ -192,50 +192,50 @@
self.app.sa_session.flush()
if 'tool_dependencies' in metadata_dict:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
+ tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
+ shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
+ repository_tools_tups = shed_util.get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
+ repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app,
- self.tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
+ repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app,
+ self.tool_path,
+ sample_files,
+ repository_tools_tups,
+ sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
+ shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Install tool dependencies.
- update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ shed_util.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- installed_tool_dependencies = handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
- add_to_tool_panel( self.app,
- tool_shed_repository.name,
- repository_clone_url,
- tool_shed_repository.installed_changeset_revision,
- repository_tools_tups,
- self.repository_owner,
- self.migrated_tools_config,
- tool_panel_dict=tool_panel_dict_for_display,
- new_install=True )
+ shed_util.add_to_tool_panel( self.app,
+ tool_shed_repository.name,
+ repository_clone_url,
+ tool_shed_repository.installed_changeset_revision,
+ repository_tools_tups,
+ self.repository_owner,
+ self.migrated_tools_config,
+ tool_panel_dict=tool_panel_dict_for_display,
+ new_install=True )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -246,16 +246,16 @@
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
+ converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
+ name=tool_shed_repository.name,
+ owner=self.repository_owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
@@ -281,20 +281,20 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( relative_clone_dir, name )
install_dir = os.path.join( clone_dir, name )
- ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
- tool_shed_repository = create_or_update_tool_shed_repository( app=self.app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=self.repository_owner,
- dist_to_shed=True )
- update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=self.app.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=self.repository_owner,
+ dist_to_shed=True )
+ shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
@@ -305,9 +305,9 @@
self.app.sa_session.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
- update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ shed_util.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
@@ -316,7 +316,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
+ shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
# CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
@@ -325,8 +325,8 @@
tool_id = tool_dict[ 'guid' ]
old_tool_id = tool_dict[ 'id' ]
tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = get_tool_version( self.app, tool_id )
+ tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id )
if not tool_version_using_old_id:
tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
@@ -338,15 +338,15 @@
self.app.sa_session.add( tool_version_using_guid )
self.app.sa_session.flush()
# Associate the two versions as parent / child.
- tool_version_association = get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
+ tool_version_association = shed_util.get_tool_version_association( self.app,
+ tool_version_using_old_id,
+ tool_version_using_guid )
if not tool_version_association:
tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
parent_id=tool_version_using_old_id.id )
self.app.sa_session.add( tool_version_association )
self.app.sa_session.flush()
- update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
+ shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
return get_non_shed_tool_panel_configs( self.app )
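The tool-version block above is a get-or-create pattern: fetch the ToolVersion row for each id, create whichever is missing, then link the guid-based version to the old-id version as child/parent. A stripped-down, self-contained sketch of that flow, with plain dictionaries standing in for the SQLAlchemy session and models:

# In-memory stand-ins for the tool_version and tool_version_association
# tables; the real code goes through self.app.sa_session.
tool_versions = {}     # tool_id -> record
associations = set()   # ( child_tool_id, parent_tool_id ) pairs

def get_or_create_tool_version( tool_id ):
    # Mirrors shed_util.get_tool_version() plus the create-and-flush branch.
    record = tool_versions.get( tool_id )
    if record is None:
        record = dict( tool_id=tool_id )
        tool_versions[ tool_id ] = record
    return record

def associate_versions( old_tool_id, guid ):
    # The guid-based version becomes the child of the old-id version,
    # like ToolVersionAssociation( tool_id=child.id, parent_id=parent.id ).
    get_or_create_tool_version( old_tool_id )
    get_or_create_tool_version( guid )
    key = ( guid, old_tool_id )
    if key not in associations:   # cf. shed_util.get_tool_version_association()
        associations.add( key )

associate_versions( 'fastqc', 'toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.1' )
assert ( 'toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.1', 'fastqc' ) in associations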
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -3,7 +3,7 @@
"""
import threading, urllib2, logging
from galaxy.util import string_as_bool
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
log = logging.getLogger( __name__ )
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -887,11 +887,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,5 @@
import os, tempfile, shutil, logging, urllib2
+from galaxy.datatypes import checkers
from galaxy import util
import shed_util_common as suc
from galaxy.tools.search import ToolBoxSearch
@@ -293,10 +294,6 @@
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
return tool_dependency_objects
-def generate_clone_url_for_installed_repository( trans, repository ):
- """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -542,9 +539,6 @@
if idx == count:
break
return headers
-def get_installed_tool_shed_repository( trans, id ):
- """Get a repository on the Galaxy side from the database via id"""
- return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -1042,44 +1036,6 @@
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
-def tool_shed_from_repository_clone_url( repository_clone_url ):
- return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
-def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
- shed_conf_dict = repository.get_shed_config_dict( app )
- shed_tool_conf = shed_conf_dict[ 'config_filename' ]
- tool_path = shed_conf_dict[ 'tool_path' ]
-
- #hack for 'trans.app' used in lots of places. These places should just directly use app
- trans = util.bunch.Bunch()
- trans.app = app
-
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
- repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = suc.clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
- tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
- owner = repository.owner
- if not owner:
- owner = get_repository_owner( cleaned_repository_clone_url )
- guid_to_tool_elem_dict = {}
- for tool_config_filename, guid, tool in repository_tools_tups:
- guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
- config_elems = []
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
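Among the helpers moved out of shed_util.py above, tool_shed_from_repository_clone_url is pure string work: everything before the 'repos' path component names the tool shed. A runnable sketch of that idea; the clean_repository_clone_url step here is a simplified stand-in for the real helper:

def clean_repository_clone_url( repository_clone_url ):
    # Strip "user@" credentials or the protocol prefix; a condensed
    # approximation of suc.clean_repository_clone_url.
    if '@' in repository_clone_url:
        return repository_clone_url.split( '@' )[ -1 ]
    if '//' in repository_clone_url:
        return repository_clone_url.split( '//' )[ -1 ]
    return repository_clone_url

def tool_shed_from_repository_clone_url( repository_clone_url ):
    # Everything before the 'repos' path component names the tool shed.
    return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )

assert tool_shed_from_repository_clone_url( 'http://toolshed.g2.bx.psu.edu/repos/devteam/fastqc' ) == 'toolshed.g2.bx.psu.edu'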
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -565,6 +565,10 @@
repository_dependencies,
metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
+def generate_clone_url_for_installed_repository( trans, repository ):
+ """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -1056,6 +1060,9 @@
if deleted:
return 'DELETED'
return None
+def get_installed_tool_shed_repository( trans, id ):
+ """Get a repository on the Galaxy side from the database via id"""
+ return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_list_of_copied_sample_files( repo, ctx, dir ):
"""
Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
@@ -2011,6 +2018,8 @@
if to_html:
str( markupsafe.escape( ''.join( translated ) ) )
return ''.join( translated )
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def tool_shed_is_this_tool_shed( toolshed_base_url ):
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
@@ -2090,6 +2099,42 @@
sa_session.delete( tool_dependency )
sa_session.flush()
return new_tool_dependency
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( suc.generate_clone_url_for_installed_repository( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
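update_in_shed_tool_config, relocated in the diff above, rewrites a shed tool panel config by matching <tool> elements on their guid attribute and swapping in freshly generated ones. A compact, runnable sketch of that element-swapping core; the XML below is invented for illustration:

import xml.etree.ElementTree as ElementTree

SHED_TOOL_CONF = """<toolbox>
  <section id="filters" name="Filters">
    <tool guid="shed/repos/devteam/filter/1.0.0" file="old/filter.xml"/>
  </section>
  <tool guid="shed/repos/devteam/sort/1.0.1" file="old/sort.xml"/>
</toolbox>"""

def rebuild_config_elems( root, guid_to_tool_elem_dict ):
    # Same loop shape as update_in_shed_tool_config: tools inside a
    # <section> are replaced in place by index, top-level tools are
    # replaced as the config_elems list is rebuilt.
    config_elems = []
    for elem in root:
        if elem.tag == 'section':
            for i, tool_elem in enumerate( elem ):
                guid = tool_elem.attrib.get( 'guid' )
                if guid in guid_to_tool_elem_dict:
                    elem[ i ] = guid_to_tool_elem_dict[ guid ]
        elif elem.tag == 'tool':
            guid = elem.attrib.get( 'guid' )
            if guid in guid_to_tool_elem_dict:
                elem = guid_to_tool_elem_dict[ guid ]
        config_elems.append( elem )
    return config_elems

root = ElementTree.fromstring( SHED_TOOL_CONF )
new_elem = ElementTree.Element( 'tool', { 'guid': 'shed/repos/devteam/filter/1.0.0', 'file': 'new/filter.xml' } )
config_elems = rebuild_config_elems( root, { new_elem.attrib[ 'guid' ]: new_elem } )
assert config_elems[ 0 ].find( 'tool' ).attrib[ 'file' ] == 'new/filter.xml'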
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1,7 +1,7 @@
import urllib2, tempfile
from admin import *
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -284,7 +284,7 @@
model.ToolDependency.installation_status.ERROR ] ) )
]
def build_initial_query( self, trans, **kwd ):
- tool_dependency_ids = get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
if tool_dependency_ids:
clause_list = []
for tool_dependency_id in tool_dependency_ids:
@@ -308,15 +308,15 @@
@web.require_admin
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools:
metadata = repository.metadata
try:
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
+ repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata )
except Exception, e:
error = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error )
@@ -324,25 +324,25 @@
% ( error, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) ) )
# Reload tools into the appropriate tool panel section.
tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
- add_to_tool_panel( trans.app,
- repository.name,
- repository_clone_url,
- repository.changeset_revision,
- repository_tools_tups,
- repository.owner,
- shed_tool_conf,
- tool_panel_dict,
- new_install=False )
+ shed_util.add_to_tool_panel( trans.app,
+ repository.name,
+ repository_clone_url,
+ repository.changeset_revision,
+ repository_tools_tups,
+ repository.owner,
+ shed_tool_conf,
+ tool_panel_dict,
+ new_install=False )
trans.sa_session.add( repository )
trans.sa_session.flush()
if repository.includes_datatypes:
repository_install_dir = os.path.abspath ( relative_install_dir )
# Deactivate proprietary datatypes.
- installed_repository_dict = load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
+ installed_repository_dict = shed_util.load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
if installed_repository_dict and 'converter_path' in installed_repository_dict:
- load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
+ shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
if installed_repository_dict and 'display_path' in installed_repository_dict:
- load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
+ shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
message = 'The <b>%s</b> repository has been activated.' % repository.name
status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -355,7 +355,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
repository=repository,
message=message,
@@ -379,7 +379,7 @@
action='reset_to_install',
**kwd ) )
if operation == "activate or reinstall":
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if repository.uninstalled:
if repository.includes_tools:
# Only allow selecting a different section in the tool panel if the repository was uninstalled.
@@ -407,7 +407,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
+ tool_dependency = shed_util.get_tool_dependency( trans, kwd[ 'id' ] )
if tool_dependency.in_error_state:
message = "This tool dependency is not installed correctly (see the <b>Tool dependency installation error</b> below). "
message += "Choose <b>Uninstall this tool dependency</b> from the <b>Repository Actions</b> menu, correct problems "
@@ -439,7 +439,7 @@
@web.require_admin
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
@@ -459,8 +459,8 @@
status = params.get( 'status', 'done' )
remove_from_disk = params.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
- tool_shed_repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
if tool_path:
relative_install_dir = os.path.join( tool_path, relative_install_dir )
@@ -471,14 +471,14 @@
if params.get( 'deactivate_or_uninstall_repository_button', False ):
if tool_shed_repository.includes_tools:
# Handle tool panel alterations.
- remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
+ shed_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
- installed_repository_dict = load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
+ installed_repository_dict = shed_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
if installed_repository_dict and 'converter_path' in installed_repository_dict:
- load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
if installed_repository_dict and 'display_path' in installed_repository_dict:
- load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
+ shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
@@ -496,7 +496,7 @@
tool_shed_repository.uninstalled = True
# Remove all installed tool dependencies.
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = '%s %s' % ( errors, error_message )
tool_shed_repository.deleted = True
@@ -553,7 +553,7 @@
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
- tool_shed_url = clean_tool_shed_url( tool_shed_url )
+ tool_shed_url = shed_util.clean_tool_shed_url( tool_shed_url )
return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
@@ -570,7 +570,7 @@
name, owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We
need it so that we can derive the tool shed from which it was installed.
"""
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
@@ -585,7 +585,7 @@
text = ''
return text
def get_versions_of_tool( self, app, guid ):
- tool_version = get_tool_version( app, guid )
+ tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@web.expose
@web.require_admin
@@ -614,10 +614,10 @@
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
- installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
message += ' %s' % installed_tool_dependency.error_message
@@ -644,7 +644,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
if kwd.get( 'install_tool_dependencies_button', False ):
# Filter tool dependencies to only those that are installed.
@@ -691,7 +691,7 @@
tool_shed_repository, repo_info_dict = tup
repo_info_dict = tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
@@ -702,10 +702,10 @@
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
- current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
+ current_changeset_revision, current_ctx_rev = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
- pull_repository( repo, repository_clone_url, current_changeset_revision )
+ shed_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
suc.update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
@@ -720,9 +720,9 @@
metadata = tool_shed_repository.metadata
if 'tools' in metadata:
# Get the tool_versions from the tool shed for each tool in the installed change set.
- update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ shed_util.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
url = suc.url_join( tool_shed_url,
'/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -732,7 +732,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
+ shed_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
@@ -741,20 +741,20 @@
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp()
# Install tool dependencies.
- update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ shed_util.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
- installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_shed_repository.tool_dependencies )
try:
shutil.rmtree( work_dir )
except:
pass
- update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
else:
# An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
self.set_repository_attributes( trans,
@@ -788,34 +788,34 @@
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'tools' in metadata_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
+ tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
+ shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
+ repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
+ repository_tools_tups = shed_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app,
- tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
+ repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( trans.app,
+ tool_path,
+ sample_files,
+ repository_tools_tups,
+ sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
- add_to_tool_panel( app=trans.app,
- repository_name=tool_shed_repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_tools_tups=repository_tools_tups,
- owner=tool_shed_repository.owner,
- shed_tool_conf=shed_tool_conf,
- tool_panel_dict=tool_panel_dict,
- new_install=True )
+ shed_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
+ shed_util.add_to_tool_panel( app=trans.app,
+ repository_name=tool_shed_repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=tool_shed_repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=True )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -827,16 +827,16 @@
files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
+ converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
@@ -851,7 +851,7 @@
status = params.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
operation = kwd.get( 'operation', None )
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
@@ -866,7 +866,7 @@
( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
@@ -954,9 +954,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency_ids = get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
# We need a tool_shed_repository, so get it from one of the tool_dependencies.
- tool_dependency = get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
tool_shed_repository = tool_dependency.tool_shed_repository
self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name
self.tool_dependency_grid.global_actions = \
@@ -997,7 +997,7 @@
elif operation == 'uninstall':
tool_dependencies_for_uninstallation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
trans.model.ToolDependency.installation_status.ERROR ]:
tool_dependencies_for_uninstallation.append( tool_dependency )
@@ -1012,7 +1012,7 @@
if trans.app.config.tool_dependency_dir:
tool_dependencies_for_installation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
trans.model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
@@ -1156,17 +1156,17 @@
**new_kwd ) )
else:
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
- tool_shed_repository = create_or_update_tool_shed_repository( app=trans.app,
- name=name,
- description=description,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=changeset_revision,
- owner=repository_owner,
- dist_to_shed=False )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=trans.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=changeset_revision,
+ owner=repository_owner,
+ dist_to_shed=False )
created_or_updated_tool_shed_repositories.append( tool_shed_repository )
filtered_repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
if created_or_updated_tool_shed_repositories:
@@ -1288,14 +1288,14 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
- tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
@@ -1307,9 +1307,9 @@
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if not tool_panel_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
else:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
# Fix this to handle the case where the tools are distributed across more than 1 ToolSection - this assumes everything was loaded into 1
# section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
@@ -1353,18 +1353,18 @@
else:
tool_section = None
# The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function.
- tool_shed_repository = create_or_update_tool_shed_repository( trans.app,
- tool_shed_repository.name,
- tool_shed_repository.description,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.ctx_rev,
- repository_clone_url,
- tool_shed_repository.metadata,
- trans.model.ToolShedRepository.installation_status.NEW,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.owner,
- tool_shed_repository.dist_to_shed )
- ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( trans.app,
+ tool_shed_repository.name,
+ tool_shed_repository.description,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.ctx_rev,
+ repository_clone_url,
+ tool_shed_repository.metadata,
+ trans.model.ToolShedRepository.installation_status.NEW,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.owner,
+ tool_shed_repository.dist_to_shed )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repo_info_dict = kwd.get( 'repo_info_dict', None )
# The repo_info_dict should be encoded.
if not repo_info_dict:
@@ -1450,11 +1450,11 @@
#clone_path, clone_directory = os.path.split( clone_dir )
changeset_revisions = util.listify( text )
for previous_changeset_revision in changeset_revisions:
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
- tool_shed,
- repository_name,
- repository_owner,
- previous_changeset_revision )
+ tool_shed_repository = shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
+ tool_shed,
+ repository_name,
+ repository_owner,
+ previous_changeset_revision )
if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
return tool_shed_repository, previous_changeset_revision
return None, None
@@ -1462,11 +1462,11 @@
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
repository_id = kwd[ 'id' ]
- tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
@@ -1486,7 +1486,7 @@
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
- if panel_entry_per_tool( tool_panel_dict ):
+ if shed_util.panel_entry_per_tool( tool_panel_dict ):
# TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
# following assumes everything was loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
@@ -1570,9 +1570,9 @@
@web.require_admin
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
- repository = get_installed_tool_shed_repository( trans, id )
+ repository = suc.get_installed_tool_shed_repository( trans, id )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
@@ -1587,7 +1587,7 @@
persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
- update_in_shed_tool_config( trans.app, repository )
+ suc.update_in_shed_tool_config( trans.app, repository )
trans.sa_session.add( repository )
trans.sa_session.flush()
message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
@@ -1607,7 +1607,7 @@
@web.require_admin
def reset_to_install( self, trans, **kwd ):
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
self.set_repository_attributes( trans,
repository,
@@ -1642,7 +1642,7 @@
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
# Get the tool_versions from the tool shed for each tool in the installed change set.
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -1652,7 +1652,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, repository )
+ shed_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
else:
@@ -1660,7 +1660,7 @@
message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
message ++ "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
@@ -1699,7 +1699,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
if kwd.get( 'uninstall_tool_dependencies_button', False ):
@@ -1710,7 +1710,7 @@
if tool_dependency.can_uninstall:
tool_dependencies_for_uninstallation.append( tool_dependency )
for tool_dependency in tool_dependencies_for_uninstallation:
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = True
message = '%s %s' % ( message, error_message )
@@ -1744,12 +1744,12 @@
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
latest_ctx_rev = params.get( 'latest_ctx_rev', None )
- repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ repository = shed_util.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
else:
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
if tool_path:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
@@ -1757,9 +1757,9 @@
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- pull_repository( repo, repository_clone_url, latest_ctx_rev )
+ shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
- tool_shed = clean_tool_shed_url( tool_shed_url )
+ tool_shed = shed_util.clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1779,10 +1779,10 @@
trans.sa_session.flush()
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
+ tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
@@ -1802,7 +1802,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
repository_metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
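As a reference for the set_tool_versions flow in this commit, the work reduces to a plain urllib2 round trip against the tool shed followed by JSON decoding. A rough, self-contained sketch of that sequence, with the URL layout taken from the diff above but the function name and error handling added for illustration:

import urllib2
from galaxy.util.json import from_json_string

def fetch_tool_versions( tool_shed_url, name, owner, changeset_revision ):
    # Illustrative only: mirrors the url_join / urlopen / from_json_string steps above.
    url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
        ( tool_shed_url.rstrip( '/' ), name, owner, changeset_revision )
    response = urllib2.urlopen( url )
    try:
        text = response.read()
    finally:
        response.close()
    if text:
        return from_json_string( text )
    return []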
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/70f88a048ed2/
changeset: 70f88a048ed2
user: greg
date: 2012-12-12 23:36:40
summary: Tweak some imports.
affected #: 10 files
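The pattern repeated throughout this changeset replaces wildcard imports with a qualified module alias, so each helper's origin is visible at its call site. A minimal before/after illustration (the call shown is representative, not lifted from one particular spot in the diffs):

# Before: the helper's home module is invisible at the call site.
from galaxy.util.shed_util_common import *
config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )

# After: the suc alias records exactly where the helper lives.
import galaxy.util.shed_util_common as suc
config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )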
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -6,6 +6,7 @@
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
@@ -87,7 +88,7 @@
break
full_path = str( os.path.abspath( os.path.join( root, name ) ) )
tool = self.toolbox.load_tool( full_path )
- return generate_tool_guid( repository_clone_url, tool )
+ return suc.generate_tool_guid( repository_clone_url, tool )
def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
# Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
# either ToolSection elements or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml
@@ -177,15 +178,15 @@
else:
print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
% ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
- repository=tool_shed_repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = self.shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app,
+ repository=tool_shed_repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
@@ -217,7 +218,7 @@
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -242,7 +243,7 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
@@ -294,7 +295,7 @@
owner=self.repository_owner,
dist_to_shed=True )
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -33,7 +33,7 @@
self.sleeper.sleep( self.seconds_to_sleep )
log.info( 'Transfer job restarter shutting down...' )
def check_for_update( self, repository ):
- tool_shed_url = get_url_from_repository_tool_shed( self.app, repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( self.app, repository )
url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&from_update_manager=True' % \
( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
try:
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,10 +1,10 @@
import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from shed_util_common import *
+import shed_util_common as suc
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
-from galaxy.tool_shed.encoding_util import *
-from galaxy.model.orm import *
+from galaxy.tool_shed import encoding_util
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
@@ -185,7 +185,7 @@
filename=os.path.join( tool_path, filename )
# Attempt to ensure we're copying an appropriate file.
if is_data_index_sample_file( filename ):
- copy_sample_file( app, filename, dest_path=dest_path )
+ suc.copy_sample_file( app, filename, dest_path=dest_path )
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
repository_name=name,
@@ -204,7 +204,7 @@
# to it being uninstalled.
current_changeset_revision = installed_changeset_revision
sa_session = app.model.context.current
- tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
@@ -255,7 +255,7 @@
if shed_config_dict.get( 'tool_path' ):
relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
try:
tree = ElementTree.parse( tool_dependencies_config )
except Exception, e:
@@ -295,8 +295,8 @@
return tool_dependency_objects
def generate_clone_url_for_installed_repository( trans, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -321,7 +321,7 @@
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
tool_elem = None
- cleaned_repository_clone_url = clean_repository_clone_url( repository_clone_url )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner( cleaned_repository_clone_url )
tool_shed = cleaned_repository_clone_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -475,12 +475,12 @@
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
- for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
ctx_file_name = strip_path( ctx_file )
if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
+ return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
@@ -525,7 +525,7 @@
break
return converter_path, display_path
def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
- url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
response = urllib2.urlopen( url )
ctx_rev = response.read()
response.close()
@@ -552,7 +552,7 @@
repo_path = repo_path.replace( '/', '', 1 )
return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
def get_repository_owner_from_clone_url( repository_clone_url ):
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
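The owner and tool shed extraction above relies on the fixed .../repos/&lt;owner&gt;/&lt;name&gt; layout of clone URLs. A rough stand-in for the splits involved (clean_repository_clone_url is only approximated here; the real helper's details are not shown in this diff):

def parse_clone_url( repository_clone_url ):
    # Illustrative approximation of clean_repository_clone_url() plus the splits above.
    tmp_url = repository_clone_url.split( '?' )[ 0 ].rstrip( '/' )
    tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
    owner = tmp_url.split( 'repos' )[ 1 ].lstrip( '/' ).split( '/' )[ 0 ]
    return tool_shed, owner

# e.g. parse_clone_url( 'http://toolshed.g2.bx.psu.edu/repos/devteam/emboss_5' )
# -> ( 'http://toolshed.g2.bx.psu.edu', 'devteam' )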
def get_repository_tools_tups( app, metadata_dict ):
@@ -674,14 +674,14 @@
.first()
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
( repository.name, repository.owner, repository.installed_changeset_revision ) )
try:
response = urllib2.urlopen( url )
encoded_update_dict = response.read()
if encoded_update_dict:
- update_dict = tool_shed_decode( encoded_update_dict )
+ update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
changeset_revision = update_dict[ 'changeset_revision' ]
ctx_rev = update_dict[ 'ctx_rev' ]
response.close()
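The encoding_util.tool_shed_decode call above unwraps a payload the tool shed signs before sending, letting the two applications exchange dictionaries with a basic integrity check. A hypothetical codec in that spirit; the key, separator, and serialization are assumptions for illustration, not the project's actual scheme:

import binascii
from galaxy.util.hash_util import hmac_new
import simplejson

SHARED_KEY = 'example-shared-key'  # hypothetical; both applications must agree on it

def encode_payload( value ):
    # Sign the serialized dict, then hex-encode it alongside the digest.
    serialized = simplejson.dumps( value )
    return '%s:%s' % ( hmac_new( SHARED_KEY, serialized ), binascii.hexlify( serialized ) )

def decode_payload( encoded ):
    # Verify the digest before trusting the payload.
    digest, hex_value = encoded.split( ':', 1 )
    serialized = binascii.unhexlify( hex_value )
    if hmac_new( SHARED_KEY, serialized ) != digest:
        raise ValueError( 'payload failed integrity check' )
    return simplejson.loads( serialized )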
@@ -704,11 +704,11 @@
break
if missing_data_table_entry:
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
+ sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
if sample_tool_data_table_conf:
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
- error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
+ error, message = suc.handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
if error:
# TODO: Do more here than logging an exception.
log.debug( message )
@@ -716,7 +716,7 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( app )
+ suc.reset_tool_data_tables( app )
return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
"""
@@ -734,7 +734,7 @@
for sample_file in sample_files:
sample_file_name = strip_path( sample_file )
if sample_file_name == '%s.sample' % missing_file_name:
- copy_sample_file( app, sample_file )
+ suc.copy_sample_file( app, sample_file )
if options.tool_data_table and options.tool_data_table.missing_index_file:
options.tool_data_table.handle_found_index_file( options.missing_index_file )
sample_files_copied.append( options.missing_index_file )
@@ -852,7 +852,7 @@
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
repository_dict = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
if datatypes_config:
converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
if converter_path or display_path:
@@ -883,10 +883,7 @@
return False
def pull_repository( repo, repository_clone_url, ctx_rev ):
"""Pull changes from a remote repository to a local one."""
- commands.pull( get_configured_ui(),
- repo,
- source=repository_clone_url,
- rev=[ ctx_rev ] )
+ commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1038,7 +1035,7 @@
trans.app.toolbox.write_integrated_tool_panel_config_file()
def remove_tool_dependency( trans, tool_dependency ):
dependency_install_dir = tool_dependency.installation_directory( trans.app )
- removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
+ removed, error_message = suc.remove_tool_dependency_installation_directory( dependency_install_dir )
if removed:
tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED
tool_dependency.error_message = None
@@ -1046,7 +1043,7 @@
trans.sa_session.flush()
return removed, error_message
def tool_shed_from_repository_clone_url( repository_clone_url ):
- return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
+ return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def update_in_shed_tool_config( app, repository ):
# A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list.
@@ -1060,7 +1057,7 @@
tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -5,7 +5,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from common import *
from repository import RepositoryGrid, CategoryGrid
@@ -342,8 +342,8 @@
class RevisionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository_metadata ):
repository = repository_metadata.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
return "%s:%s" % ( str( ctx.rev() ), repository_metadata.changeset_revision )
class ToolsColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository_metadata ):
@@ -481,7 +481,7 @@
# The received id is the repository id, so we need to get the id of the user
# that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_by_category":
# Eliminate the current filters if any exist.
@@ -513,7 +513,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -586,7 +586,7 @@
count = 0
deleted_repositories = ""
for repository_id in ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if not repository.deleted:
repository.deleted = True
trans.sa_session.add( repository )
@@ -717,12 +717,12 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = TOOL_SHED_ADMIN_CONTROLLER
- message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ kwd[ 'CONTROLLER' ] = suc.TOOL_SHED_ADMIN_CONTROLLER
+ message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = build_repository_ids_select_field( trans, TOOL_SHED_ADMIN_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans, suc.TOOL_SHED_ADMIN_CONTROLLER )
return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
@@ -740,7 +740,7 @@
count = 0
undeleted_repositories = ""
for repository_id in ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.deleted:
repository.deleted = False
trans.sa_session.add( repository )
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -6,7 +6,7 @@
from galaxy.util.odict import odict
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
@@ -108,12 +108,12 @@
tool_versions_dict = {}
for tool_dict in metadata.get( 'tools', [] ):
# We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions )
+ parent_id = suc.get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
@@ -130,7 +130,7 @@
return False
if changeset_revision == repository.tip( trans.app ):
return True
- file_name = strip_path( file_path )
+ file_name = suc.strip_path( file_path )
latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
try:
@@ -140,7 +140,7 @@
return can_use_disk_file
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
return repository_metadata.malicious
return False
@@ -191,7 +191,7 @@
tool.id,
tool.version )
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
- stripped_file_name = strip_path( file_name )
+ stripped_file_name = suc.strip_path( file_name )
file_path = None
for root, dirs, files in os.walk( repo_files_dir ):
if root.find( '.hg' ) < 0:
@@ -246,11 +246,11 @@
This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files,
multiple files will have the same name in various directories within the repository.
"""
- stripped_filename = strip_path( filename )
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ stripped_filename = suc.strip_path( filename )
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
manifest_ctx = repo.changectx( changeset )
for ctx_file in manifest_ctx.files():
- ctx_file_name = strip_path( ctx_file )
+ ctx_file_name = suc.strip_path( ctx_file )
if ctx_file_name == stripped_filename:
try:
fctx = manifest_ctx[ ctx_file ]
@@ -268,10 +268,10 @@
return None
def get_previous_repository_reviews( trans, repository, changeset_revision ):
"""Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
previous_reviews_dict = odict()
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
previous_changeset_revision = str( repo.changectx( changeset ) )
if previous_changeset_revision in reviewed_revision_hashes:
previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
@@ -313,9 +313,9 @@
def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
if repository is None:
repository = repository_metadata.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
changeset_revision = repository_metadata.changeset_revision
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
rev = '%04d' % ctx.rev()
label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
@@ -324,7 +324,7 @@
label = "-1:%s" % changeset_revision
return rev, label, changeset_revision
def get_rev_label_from_changeset_revision( repo, changeset_revision ):
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
rev = '%04d' % ctx.rev()
label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
@@ -358,8 +358,8 @@
Return a string consisting of the human-readable
changeset rev and the changeset revision string.
"""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
else:
@@ -389,7 +389,7 @@
# user is not an admin user, the email will not include any information about both HTML and image content
# that was included in the change set.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and ( new_repo_alert or repository.email_alerts ):
# Send email alert to users that want them.
@@ -457,9 +457,9 @@
log.exception( "An error occurred sending a tool shed repository update alert by email." )
def has_previous_repository_reviews( trans, repository, changeset_revision ):
"""Determine if a repository has a changeset revision review prior to the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
previous_changeset_revision = str( repo.changectx( changeset ) )
if previous_changeset_revision in reviewed_revision_hashes:
return True
@@ -471,9 +471,9 @@
revision and the first changeset revision in the repository, searching backwards.
"""
original_tool_data_path = trans.app.config.tool_data_path
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
message = ''
tool = None
can_use_disk_file = False
@@ -482,27 +482,27 @@
can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
+ tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( trans.app,
- repo_files_dir,
- tool_config_filename,
- tool,
- sample_files )
+ invalid_files_and_errors_tups = suc.check_tool_input_params( trans.app,
+ repo_files_dir,
+ tool_config_filename,
+ tool,
+ sample_files )
if invalid_files_and_errors_tups:
- message2 = generate_message_for_invalid_tools( trans,
- invalid_files_and_errors_tups,
- repository,
- metadata_dict=None,
- as_html=True,
- displaying_invalid_tool=True )
- message = concat_messages( message, message2 )
+ message2 = suc.generate_message_for_invalid_tools( trans,
+ invalid_files_and_errors_tups,
+ repository,
+ metadata_dict=None,
+ as_html=True,
+ displaying_invalid_tool=True )
+ message = suc.concat_messages( message, message2 )
else:
- tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
- remove_dir( work_dir )
+ tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
+ suc.remove_dir( work_dir )
trans.app.config.tool_data_path = original_tool_data_path
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
return repository, tool, message
def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
"""
@@ -594,36 +594,36 @@
message = ''
status = 'done'
encoded_id = trans.security.encode_id( repository.id )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
if metadata_dict:
- downloadable = is_downloadable( metadata_dict )
+ downloadable = suc.is_downloadable( metadata_dict )
repository_metadata = None
if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \
new_tool_metadata_required( trans, repository, metadata_dict ) or \
new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
+ repository_metadata = suc.create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
# If this is the first record stored for this repository, see if we need to send any email alerts.
if len( repository.downloadable_revisions ) == 1:
handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
- downloadable = is_downloadable( metadata_dict )
+ downloadable = suc.is_downloadable( metadata_dict )
# Update the last saved repository_metadata table row.
repository_metadata.changeset_revision = repository.tip( trans.app )
repository_metadata.metadata = metadata_dict
@@ -632,17 +632,17 @@
trans.sa_session.flush()
else:
# There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
+ repository_metadata = suc.create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
# Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
changeset_revisions = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
- if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
changeset_revisions.append( changeset_revision )
add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
elif len( repo ) == 1 and not invalid_file_tups:
@@ -650,10 +650,10 @@
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
# Set metadata on the repository tip.
@@ -671,7 +671,7 @@
# Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
# added, modified or removed files that have not yet been committed.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
files_to_remove_from_disk = []
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,7 +9,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -616,7 +616,7 @@
else:
# The received id is the repository id, so we need to get the id of the user that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_i_own":
# Eliminate the current filters if any exist.
@@ -673,7 +673,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -687,10 +687,10 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
@@ -748,7 +748,7 @@
operation = kwd[ 'operation' ].lower()
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repository_metadata = get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -772,7 +772,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -817,11 +817,11 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Default to the current changeset revision.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
latest_changeset_revision = changeset_revision
from_update_manager = kwd.get( 'from_update_manager', False )
if from_update_manager:
@@ -829,9 +829,9 @@
no_update = 'false'
else:
# Start building up the url to redirect back to the calling Galaxy instance.
- url = url_join( galaxy_url,
- 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
+ url = suc.url_join( galaxy_url,
+ 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
if from_update_manager:
@@ -839,9 +839,9 @@
# Return the same value for changeset_revision and latest_changeset_revision.
url += latest_changeset_revision
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
if from_update_manager:
@@ -855,19 +855,19 @@
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
if update_to_changeset_hash:
if changeset_hash == repository.tip( trans.app ):
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
break
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_hash )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_hash )
if repository_metadata:
# We found a RepositoryMetadata record.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
break
else:
@@ -888,7 +888,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
if trans.user and trans.user.email:
return trans.fill_template( "/webapps/community/repository/contact_owner.mako",
@@ -904,7 +904,7 @@
# Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api. The hg
# purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll remove unknown files
# and empty directories. This is not currently used because it is not supported in the mercurial API.
- repo = hg.repository( get_configured_ui(), path=repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), path=repository.repo_path( trans.app ) )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
@@ -963,7 +963,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( get_configured_ui(), repository_path, create=True )
+ repo = hg.repository( suc.get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository.
lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
trans.app.hgweb_config_manager.add_entry( lhs, repository_path )
@@ -999,7 +999,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository_id = params.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) )
repository.deprecated = mark_deprecated
trans.sa_session.add( repository )
@@ -1054,7 +1054,7 @@
def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
# Allow hgweb to handle the download. This requires the tool shed
# server account's .hgrc file to include the following setting:
# [web]
@@ -1087,7 +1087,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1172,7 +1172,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1252,11 +1252,11 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Default to the received changeset revision and ctx_rev.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
ctx_rev = str( update_to_ctx.rev() )
latest_changeset_revision = changeset_revision
update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
@@ -1264,9 +1264,9 @@
# If changeset_revision is the repository tip, there are no additional updates.
return tool_shed_encode( update_dict )
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
return tool_shed_encode( update_dict )
@@ -1276,16 +1276,16 @@
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
if update_to_changeset_hash:
- if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
# We found a RepositoryMetadata record.
if changeset_hash == repository.tip( trans.app ):
# The current ctx is the repository tip, so use it.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
else:
- update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, update_to_changeset_hash )
latest_changeset_revision = update_to_changeset_hash
break
elif not update_to_changeset_hash and changeset_hash == changeset_revision:
@@ -1300,10 +1300,10 @@
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
return str( ctx.rev() )
return ''
@@ -1312,16 +1312,16 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return get_repository_file_contents( file_path )
+ return suc.get_repository_file_contents( file_path )
def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
"""Return file_name from the received changeset_revision of the repository manifest."""
stripped_file_name = strip_path( file_name )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
return named_tmp_file
def get_metadata( self, trans, repository_id, changeset_revision ):
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata and repository_metadata.metadata:
return repository_metadata.metadata
return None
@@ -1331,21 +1331,21 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repository_id = trans.security.encode_id( repository.id )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
# Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
if repository_dependencies:
return tool_shed_encode( repository_dependencies )
return ''
@@ -1361,9 +1361,9 @@
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
repository_id, changeset_revision = tup
- repository = get_repository_in_tool_shed( trans, repository_id )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
metadata = repository_metadata.metadata
if not includes_tools and 'tools' in metadata:
includes_tools = True
@@ -1372,17 +1372,17 @@
if not includes_tool_dependencies and 'tool_dependencies' in metadata:
includes_tool_dependencies = True
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- metadata=None,
- repository_metadata=repository_metadata )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ metadata=None,
+ repository_metadata=repository_metadata )
repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
includes_repository_dependencies=includes_repository_dependencies,
@@ -1397,9 +1397,9 @@
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- return build_readme_files_dict( repository_metadata )
+ repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ return suc.build_readme_files_dict( repository_metadata )
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance."""
@@ -1411,7 +1411,7 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
for downloadable_revision in repository.downloadable_revisions:
if downloadable_revision.changeset_revision == changeset_revision:
break
@@ -1432,13 +1432,13 @@
name = kwd[ 'name' ]
owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
tool_version_dicts = []
for changeset in repo.changelog:
current_changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
if repository_metadata and repository_metadata.tool_versions:
tool_version_dicts.append( repository_metadata.tool_versions )
if current_changeset_revision == changeset_revision:
@@ -1450,14 +1450,14 @@
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Initialize the tool lineage.
tool_guid_lineage = [ guid ]
# Get all ancestor guids of the received guid.
current_child_guid = guid
- for changeset in reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
ctx = repo.changectx( changeset )
- rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
if rm:
parent_guid = rm.tool_versions.get( current_child_guid, None )
if parent_guid:
@@ -1465,9 +1465,9 @@
current_child_guid = parent_guid
# Get all descendant guids of the received guid.
current_parent_guid = guid
- for changeset in reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ):
+ for changeset in suc.reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ):
ctx = repo.changectx( changeset )
- rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
if rm:
tool_versions = rm.tool_versions
for child_guid, parent_guid in tool_versions.items():
@@ -1566,15 +1566,15 @@
owner = kwd.get( 'owner', None )
galaxy_url = kwd.get( 'galaxy_url', None )
if not repository_ids:
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repository_ids = trans.security.encode_id( repository.id )
if not galaxy_url:
# If galaxy_url is not in the request, it had to have been stored in a cookie by the tool shed.
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
# Redirect back to local Galaxy to perform install.
- url = url_join( galaxy_url,
- 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
+ url = suc.url_join( galaxy_url,
+ 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
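suc.url_join() above stitches the Galaxy base URL and the controller path into a single redirect target. Its implementation is not part of this diff; a plausible minimal sketch consistent with how it is called here:

def url_join( *args ):
    # Join URL fragments with single slashes, tolerating stray leading or
    # trailing slashes on each piece (sketch only; the real helper lives in
    # galaxy.util.shed_util_common).
    return '/'.join( arg.strip( '/' ) for arg in args )

# url_join( 'http://localhost:8080/', 'admin_toolshed/prepare_for_install?...' )
# -> 'http://localhost:8080/admin_toolshed/prepare_for_install?...'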
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
@@ -1586,13 +1586,13 @@
is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
- invalid_file_tups = check_tool_input_params( trans.app,
- repository.repo_path( trans.app ),
- tool_config,
- tool,
- [] )
+ invalid_file_tups = suc.check_tool_input_params( trans.app,
+ repository.repo_path( trans.app ),
+ tool_config,
+ tool,
+ [] )
if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
elif error_message:
message = error_message
try:
@@ -1667,9 +1667,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
description = util.restore_text( params.get( 'description', repository.description ) )
@@ -1787,8 +1787,8 @@
metadata = None
is_malicious = False
repository_dependencies = None
- if changeset_revision != INITIAL_CHANGELOG_HASH:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -1796,9 +1796,9 @@
is_malicious = repository_metadata.malicious
else:
# There is no repository_metadata defined for the changeset_revision, so see if it was defined in a previous changeset in the changelog.
- previous_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
- if previous_changeset_revision != INITIAL_CHANGELOG_HASH:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
+ previous_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
if repository_metadata:
revision_label = get_revision_label( trans, repository, previous_changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -1806,13 +1806,13 @@
is_malicious = repository_metadata.malicious
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -1832,7 +1832,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
cntrller=cntrller,
repo_name=repo_name,
@@ -1903,28 +1903,28 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return open_repository_files_folder( trans, folder_path )
+ return suc.open_repository_files_folder( trans, folder_path )
@web.expose
def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -1935,7 +1935,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
containers_dict=containers_dict,
@@ -1961,14 +1961,14 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Get the lower bound changeset revision.
- lower_bound_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ lower_bound_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
# Build the list of changeset revision hashes.
changeset_hashes = []
- for changeset in reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ):
+ for changeset in suc.reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ):
changeset_hashes.append( str( repo.changectx( changeset ) ) )
if changeset_hashes:
changeset_hashes_str = ','.join( changeset_hashes )
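The loop above depends on suc.reversed_lower_upper_bounded_changelog() to visit only the changesets between the two bounds. Its exact bound handling is not visible in this diff; a rough sketch of the idea using the same mercurial changelog API seen in these controllers:

def bounded_changeset_hashes( repo, lower_bound, upper_bound ):
    # Collect hashes from just after lower_bound up to and including
    # upper_bound (assumption; the real helper may treat bounds differently).
    hashes = []
    appending = False
    for changeset in repo.changelog:
        changeset_hash = str( repo.changectx( changeset ) )
        if appending:
            hashes.append( changeset_hash )
        if changeset_hash == lower_bound:
            appending = True
        if changeset_hash == upper_bound:
            break
    return hashes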
@@ -1987,8 +1987,8 @@
action='browse_repositories',
message='Select a repository to rate',
status='error' ) )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -2017,10 +2017,10 @@
def reset_all_metadata( self, trans, id, **kwd ):
# This method is called only from the ~/templates/webapps/community/repository/manage_repository.mako template.
# It resets all metadata on the complete changelog for a single repository in the tool shed.
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
+ invalid_file_tups, metadata_dict = suc.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
- repository = get_repository_in_tool_shed( trans, id )
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
message = "All repository metadata has been reset."
@@ -2125,9 +2125,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
@@ -2163,9 +2163,9 @@
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
# Get the new repository tip.
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
if tip == repository.tip( trans.app ):
message += 'No changes to repository. '
kwd[ 'message' ] = message
@@ -2187,7 +2187,7 @@
status=status )
@web.expose
def send_to_owner( self, trans, id, message='' ):
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
if not message:
message = 'Enter a message'
status = 'error'
@@ -2237,7 +2237,7 @@
total_alerts_removed = 0
flush_needed = False
for repository_id in repository_ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.email_alerts:
email_alerts = from_json_string( repository.email_alerts )
else:
@@ -2268,7 +2268,7 @@
def set_malicious( self, trans, id, ctx_str, **kwd ):
malicious = kwd.get( 'malicious', '' )
if kwd.get( 'malicious_button', False ):
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
malicious_checked = CheckboxField.is_checked( malicious )
repository_metadata.malicious = malicious_checked
trans.sa_session.add( repository_metadata )
@@ -2309,12 +2309,12 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
- if get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ):
has_metadata = True
else:
has_metadata = False
@@ -2346,9 +2346,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, ctx_str )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, ctx_str )
if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
status = 'error'
@@ -2362,7 +2362,7 @@
anchors = modified + added + removed + deleted + unknown + ignored + clean
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
- diffs.append( to_safe_string( diff, to_html=True ) )
+ diffs.append( suc.to_safe_string( diff, to_html=True ) )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
@@ -2383,7 +2383,7 @@
status=status )
@web.expose
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
@@ -2398,8 +2398,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
@@ -2434,18 +2434,18 @@
add_id_to_name=False,
downloadable=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -2466,7 +2466,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
cntrller=cntrller,
repo=repo,
@@ -2491,16 +2491,16 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
tool_metadata_dict = {}
tool_lineage = []
tool = None
guid = None
original_tool_data_path = trans.app.config.tool_data_path
revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
@@ -2515,18 +2515,18 @@
can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans,
- repo_files_dir,
- full_path_to_tool_config,
- work_dir )
+ tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
+ repo_files_dir,
+ full_path_to_tool_config,
+ work_dir )
if message:
status = 'error'
else:
- tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans,
- repo,
- changeset_revision,
- tool_config_filename,
- work_dir )
+ tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans,
+ repo,
+ changeset_revision,
+ tool_config_filename,
+ work_dir )
if message:
status = 'error'
break
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -4,12 +4,12 @@
from galaxy.web.form_builder import SelectField, CheckboxField
from galaxy.webapps.community import model
from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from sqlalchemy.sql.expression import func
from common import *
from galaxy.webapps.community.util.container_util import STRSEP
from repository import RepositoryGrid
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy import eggs
@@ -56,7 +56,7 @@
# Restrict to revisions that have been reviewed.
if repository.reviews:
rval = ''
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for review in repository.reviews:
changeset_revision = review.changeset_revision
rev, label = get_rev_label_from_changeset_revision( repo, changeset_revision )
@@ -311,7 +311,7 @@
status = params.get( 'status', 'done' )
review = get_review( trans, kwd[ 'id' ] )
repository = review.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/browse_review.mako',
repository=repository,
@@ -384,7 +384,7 @@
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
@@ -392,7 +392,7 @@
**kwd ) )
# A review can be initially performed only on an installable revision of a repository, so make sure we have metadata associated
# with the received changeset_revision.
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
@@ -470,7 +470,7 @@
for component in get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for component_review in review.component_reviews:
if component_review and component_review.component:
component_name = component_review.component.name
@@ -653,9 +653,9 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
reviews_dict = odict()
@@ -669,7 +669,7 @@
repository_reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
repository_metadata_reviews = util.listify( repository_metadata.reviews )
else:
@@ -700,9 +700,9 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
changeset_revision = kwd.get( 'changeset_revision', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
@@ -765,9 +765,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision )
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako',
@@ -780,7 +780,7 @@
@web.expose
@web.require_login( "view or manage repository" )
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy import eggs
eggs.require('mercurial')
@@ -28,9 +28,9 @@
category_ids = util.listify( params.get( 'category_id', '' ) )
categories = get_categories( trans )
repository_id = params.get( 'repository_id', '' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
@@ -53,7 +53,7 @@
uploaded_directory = tempfile.mkdtemp()
repo_url = 'http%s' % url[ len( 'hg' ): ]
repo_url = repo_url.encode( 'ascii', 'replace' )
- commands.clone( get_configured_ui(), repo_url, uploaded_directory )
+ commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory )
elif url:
valid_url = True
try:
@@ -146,7 +146,7 @@
if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, error_message = handle_sample_tool_data_table_conf_file( trans.app, full_path )
+ error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path )
if error:
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
@@ -154,7 +154,7 @@
handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
# Get the new repository tip.
if tip == repository.tip( trans.app ):
message = 'No changes to repository. '
@@ -181,8 +181,8 @@
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
- #provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
- if get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
+ # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch.
+ if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
if repository.metadata_revisions:
metadata_dict = repository.metadata_revisions[0].metadata
else:
@@ -192,7 +192,7 @@
status = 'warning'
log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
id=repository_id,
@@ -202,7 +202,7 @@
else:
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
@@ -214,7 +214,7 @@
status=status )
def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
if upload_point is not None:
@@ -250,7 +250,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
ok, message = self.__check_archive( tar )
@@ -283,7 +283,7 @@
return self.__handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed )
def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
content_alert_str = ''
files_to_remove = []
filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
@@ -339,7 +339,7 @@
if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ error, message = suc.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -10,7 +10,7 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
from galaxy.model.orm import *
from common import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
class RepoInputDataModule( InputDataModule ):
@@ -145,7 +145,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- repository = get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
+ repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
repository=repository,
changeset_revision=repository_metadata.changeset_revision,
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -2,6 +2,7 @@
from admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -423,7 +424,7 @@
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) )
+ url = suc.url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -439,10 +440,10 @@
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -531,14 +532,14 @@
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url )
+ url = suc.url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
def find_workflows_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
+ url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
def generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
@@ -547,20 +548,20 @@
<tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>
http://test@bx.psu.edu:9009/repos/test/filter
"""
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
# Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
tool_shed_url = clean_tool_shed_url( tool_shed_url )
- return url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
+ return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return get_repository_file_contents( file_path )
+ return suc.get_repository_file_contents( file_path )
@web.expose
@web.require_admin
def get_repository_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
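The generate_tool_path() docstring above fixes the on-disk layout; a short worked example of its string handling, with the value produced by suc.clean_repository_clone_url() assumed to be the clone URL minus protocol and credentials:

# Illustrative walk-through (values assumed):
repository_clone_url = 'http://test@bx.psu.edu:9009/repos/test/filter'
tmp_url = 'bx.psu.edu:9009/repos/test/filter'   # assumed clean_repository_clone_url() result
items = tmp_url.split( 'repos' )                # [ 'bx.psu.edu:9009/', '/test/filter' ]
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
# After clean_tool_shed_url() and suc.url_join(), the path becomes roughly:
# 'bx.psu.edu:9009/repos/test/filter/<installed changeset revision>'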
@@ -570,10 +571,10 @@
need it so that we can derive the tool shed from which it was installed.
"""
repository = get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository_name, repository_owner, changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -612,7 +613,7 @@
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -697,15 +698,15 @@
clone_dir = os.path.join( tool_path, relative_clone_dir )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( tool_path, relative_install_dir )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( install_dir ) )
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
pull_repository( repo, repository_clone_url, current_changeset_revision )
- update_repository( repo, ctx_rev=current_ctx_rev )
+ suc.update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
tool_path=tool_path,
@@ -722,10 +723,10 @@
update_tool_shed_repository_status( trans.app,
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- url = url_join( tool_shed_url,
- '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -744,7 +745,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -774,15 +775,15 @@
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=tool_shed_repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=tool_shed_repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -824,7 +825,7 @@
files_dir = relative_install_dir
if shed_config_dict.get( 'tool_path' ):
files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
@@ -859,13 +860,10 @@
**kwd ) )
if repository.can_install and operation == 'install':
# Send a request to the tool shed to install the repository.
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
- ( repository.name,
- repository.owner,
- repository.installed_changeset_revision,
- ( url_for( '/', qualified=True ) ) ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
@@ -1049,7 +1047,7 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return open_repository_files_folder( trans, folder_path )
+ return suc.open_repository_files_folder( trans, folder_path )
@web.expose
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
@@ -1081,9 +1079,9 @@
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url = url_join( tool_shed_url,
- 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
- ( repository_ids, changeset_revisions ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
+ ( repository_ids, changeset_revisions ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1240,21 +1238,21 @@
repository_dependencies = None
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- url = url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
- containers_dict = build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies,
+ tool_dependencies=tool_dependencies )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle tool dependencies check box.
@@ -1300,7 +1298,7 @@
repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
tool_section = None
tool_panel_section_key = None
metadata = tool_shed_repository.metadata
@@ -1377,17 +1375,17 @@
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision )
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
repo_info_dict = tool_shed_encode( repo_info_dict )
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
@@ -1439,12 +1437,12 @@
repository_dependencies = None
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous changeset revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url = url_join( tool_shed_url,
- 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
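On the tool shed side (see the previous_changeset_revisions hunk earlier in this diff) the response body is a comma-separated string of changeset hashes, so the client presumably splits it back apart. A minimal sketch of that handling, assuming an empty body means no previous revisions:

# 'text' is the raw response body read above.
if text:
    changeset_revisions = text.split( ',' )
else:
    changeset_revisions = []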
@@ -1466,7 +1464,7 @@
repository_id = kwd[ 'id' ]
tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
@@ -1474,16 +1472,16 @@
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1513,21 +1511,21 @@
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
status = 'warning'
if metadata and 'readme_files' in metadata:
- url = url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
- containers_dict = build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies,
+ tool_dependencies=tool_dependencies )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle repository dependencies check box.
@@ -1558,12 +1556,12 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = GALAXY_ADMIN_TOOL_SHED_CONTROLLER
- message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ kwd[ 'CONTROLLER' ] = suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER
+ message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = build_repository_ids_select_field( trans, GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans, suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
@@ -1573,20 +1571,20 @@
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
repository = get_installed_tool_shed_repository( trans, id )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
update_in_shed_tool_config( trans.app, repository )
@@ -1645,10 +1643,10 @@
def set_tool_versions( self, trans, **kwd ):
# Get the tool_versions from the tool shed for each tool in the installed change set.
repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository.name, repository.owner, repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1757,21 +1755,21 @@
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
else:
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
- repo = hg.repository( get_configured_ui(), path=repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
pull_repository( repo, repository_clone_url, latest_ctx_rev )
- update_repository( repo, latest_ctx_rev )
+ suc.update_repository( repo, latest_ctx_rev )
tool_shed = clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=True,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=True,
+ persist=True )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
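
The pattern applied throughout this changeset is to reach shared shed utilities through an explicit module alias (suc for shed_util_common) rather than through names pulled in by a star import. A minimal sketch of the same idea using only the standard library, so it runs anywhere; the helper name is invented for illustration:

    import os.path as osp

    def build_repo_files_dir( tool_path, relative_install_dir, name ):
        # Qualified calls make the origin of each helper obvious at the call
        # site, mirroring the shed_util_common -> suc convention used above.
        return osp.abspath( osp.join( tool_path, relative_install_dir, name ) )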
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: inithello: Added functional tests to verify repository installation on the Galaxy side.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ae60aaaf6a13/
changeset: ae60aaaf6a13
user: inithello
date: 2012-12-12 22:56:14
summary: Added functional tests to verify repository installation on the Galaxy side.
affected #: 6 files
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -1,6 +1,8 @@
+import galaxy.model
import galaxy.webapps.community.model as model
from galaxy.model.orm import *
from galaxy.webapps.community.model.mapping import context as sa_session
+from galaxy.model.mapping import context as ga_session
def delete_obj( obj ):
sa_session.delete( obj )
@@ -12,6 +14,10 @@
def flush( obj ):
sa_session.add( obj )
sa_session.flush()
+def get_category_by_name( name ):
+ return sa_session.query( model.Category ) \
+ .filter( model.Category.table.c.name == name ) \
+ .first()
def get_default_user_permissions_by_role( role ):
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.role_id == role.id ) \
@@ -20,6 +26,12 @@
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.user_id==user.id ) \
.all()
+def get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision ):
+ return ga_session.query( galaxy.model.ToolShedRepository ) \
+ .filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
+ galaxy.model.ToolShedRepository.table.c.owner == owner,
+ galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
def get_private_role( user ):
for role in user.all_roles():
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
@@ -39,6 +51,17 @@
sa_session.flush()
def refresh( obj ):
sa_session.refresh( obj )
+def ga_refresh( obj ):
+ ga_session.refresh( obj )
+def get_galaxy_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_galaxy_user( email ):
+ return ga_session.query( galaxy.model.User ) \
+ .filter( galaxy.model.User.table.c.email==email ) \
+ .first()
def get_repository_by_name_and_owner( name, owner_username ):
owner = get_user_by_name( owner_username )
repository = sa_session.query( model.Repository ) \
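
The new lookup helpers above all follow the same SQLAlchemy pattern: filter on table columns combined with and_() and return the first match. A self-contained sketch of that pattern, using a stand-in model and the modern declarative API rather than Galaxy's real mappings:

    from sqlalchemy import Column, Integer, String, and_, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class ToolShedRepository( Base ):
        # Stand-in for galaxy.model.ToolShedRepository, defined only for this example.
        __tablename__ = 'tool_shed_repository'
        id = Column( Integer, primary_key=True )
        name = Column( String )
        owner = Column( String )
        changeset_revision = Column( String )

    engine = create_engine( 'sqlite://' )
    Base.metadata.create_all( engine )

    with Session( engine ) as session:
        session.add( ToolShedRepository( name='filtering_0000', owner='user1', changeset_revision='abc123' ) )
        session.commit()
        # Mirrors get_galaxy_repository_by_name_owner_changeset_revision() above.
        repository = session.query( ToolShedRepository ) \
                            .filter( and_( ToolShedRepository.name == 'filtering_0000',
                                           ToolShedRepository.owner == 'user1',
                                           ToolShedRepository.changeset_revision == 'abc123' ) ) \
                            .first()
        assert repository is not None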
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,7 +1,9 @@
import galaxy.webapps.community.util.hgweb_config
-import common, string, os
+import galaxy.model as galaxy_model
+import common, string, os, re, time
from base.twilltestcase import tc, from_json_string, TwillTestCase, security
-from test_db_util import get_repository_metadata_by_repository_id_changeset_revision
+from test_db_util import get_repository_by_name_and_owner, get_repository_metadata_by_repository_id_changeset_revision, \
+ get_galaxy_repository_by_name_owner_changeset_revision, ga_refresh
from galaxy import eggs
eggs.require('mercurial')
@@ -19,14 +21,25 @@
self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
+ self.galaxy_host = os.environ.get( 'GALAXY_TEST_HOST' )
+ self.galaxy_port = os.environ.get( 'GALAXY_TEST_PORT' )
+ self.galaxy_url = "http://%s:%s" % ( self.galaxy_host, self.galaxy_port )
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.shed_tools_dict = {}
self.home()
+ def browse_category( self, category, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_valid_categories?sort=name&operation=valid_repositories_by_category&id=%s' % \
+ self.security.encode_id( category.id )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def browse_tool_shed( self, url, strings_displayed=[], strings_not_displayed=[] ):
+ self.visit_galaxy_url( '/admin_toolshed/browse_tool_shed?tool_shed_url=%s' % url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def check_for_strings( self, strings_displayed=[], strings_not_displayed=[] ):
if strings_displayed:
for string in strings_displayed:
@@ -122,6 +135,36 @@
tc.fv( "1", "category_id", "+%s" % category )
tc.submit( "create_repository_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def create_user_in_galaxy( self, cntrller='user', email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+ self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
+ tc.fv( '1', 'email', email )
+ tc.fv( '1', 'redirect', redirect )
+ tc.fv( '1', 'password', password )
+ tc.fv( '1', 'confirm', password )
+ tc.fv( '1', 'username', username )
+ tc.submit( 'create_user_button' )
+ previously_created = False
+ username_taken = False
+ invalid_username = False
+ try:
+ self.check_page_for_string( "Created new user account" )
+ except:
+ try:
+ # May have created the account in a previous test run...
+ self.check_page_for_string( "User with that email already exists" )
+ previously_created = True
+ except:
+ try:
+ self.check_page_for_string( 'Public name is taken; please choose another' )
+ username_taken = True
+ except:
+ try:
+ # Note that we're only checking whether the username is at least 4 characters long here...
+ self.check_page_for_string( 'Public name must be at least 4 characters in length' )
+ invalid_username = True
+ except:
+ pass
+ return previously_created, username_taken, invalid_username
def delete_files_from_repository( self, repository, filenames=[], strings_displayed=[ 'were deleted from the repository' ], strings_not_displayed=[] ):
files_to_delete = []
basepath = self.get_repo_path( repository )
@@ -212,6 +255,21 @@
else:
string = string.replace( character, replacement )
return string
+ def galaxy_login( self, email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+ previously_created, username_taken, invalid_username = \
+ self.create_user_in_galaxy( email=email, password=password, username=username, redirect=redirect )
+ if previously_created:
+ self.visit_galaxy_url( "/user/login?use_panels=False" )
+ tc.fv( '1', 'email', email )
+ tc.fv( '1', 'redirect', redirect )
+ tc.fv( '1', 'password', password )
+ tc.submit( 'login_button' )
+ def galaxy_logout( self ):
+ self.home()
+ self.visit_galaxy_url( "/user/logout" )
+ self.check_page_for_string( "You have been logged out" )
+ self.home()
+
def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
@@ -239,9 +297,6 @@
return os.path.abspath( os.path.join( filepath, filename ) )
else:
return os.path.abspath( os.path.join( self.file_dir, filename ) )
- def get_latest_repository_metadata_for_repository( self, repository ):
- # TODO: This will not work as expected. Fix it.
- return repository.metadata_revisions[ 0 ]
def get_repo_path( self, repository ):
# An entry in the hgweb.config file looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
@@ -304,6 +359,29 @@
tc.fv( "3", "allow_push", '+%s' % username )
tc.submit( 'user_access_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def install_repository( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
+ repository = get_repository_by_name_and_owner( name, owner )
+ repository_id = self.security.encode_id( repository.id )
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
+ url = '/repository/install_repositories_by_revision?changeset_revisions=%s&repository_ids=%s&galaxy_url=%s' % \
+ ( changeset_revision, repository_id, self.galaxy_url )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ tc.submit( 'select_tool_panel_section_button' )
+ html = self.last_page()
+ # Since the installation process is necessarily asynchronous, we have to extract the parameters needed to 'manually' initiate the
+ # installation process. This regex returns the tool shed repository IDs in group(1), the encoded_kwd parameter in
+ # group(2), and the reinstalling flag in group(3); these are then passed to the manage_repositories method in the Galaxy
+ # admin_toolshed controller.
+ install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
+ iri_ids = install_parameters.group(1)
+ encoded_kwd = install_parameters.group(2)
+ reinstalling = install_parameters.group(3)
+ url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+ ( iri_ids, encoded_kwd, reinstalling )
+ self.visit_galaxy_url( url )
+ self.wait_for_repository_installation( repository )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
@@ -314,6 +392,13 @@
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
+ repository = get_repository_by_name_and_owner( name, owner )
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
+ self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
+ ( self.security.encode_id( repository.id ), changeset_revision ) )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -361,3 +446,18 @@
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def visit_galaxy_url( self, url ):
+ url = '%s%s' % ( self.galaxy_url, url )
+ self.visit_url( url )
+ def wait_for_repository_installation( self, repository ):
+ final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
+ galaxy_model.ToolShedRepository.installation_status.INSTALLED,
+ galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
+ galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]
+ repository_name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ galaxy_repository = get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
+ while galaxy_repository.status not in final_states:
+ ga_refresh( galaxy_repository )
+ time.sleep( 1 )
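
Two mechanics in install_repository() and wait_for_repository_installation() are worth isolating: scraping the initiate_repository_installation() arguments out of the returned page, and polling the repository record until it reaches a terminal state. A sketch of both, with a hypothetical page fragment and a bounded retry loop added for safety (the committed loop polls without a timeout):

    import re, time

    # Hypothetical fragment of the kind the install_repository() regex expects.
    html = 'initiate_repository_installation( "101", "eyJrIjogInYifQ==", "False" );'
    match = re.search( r'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \)', html )
    iri_ids, encoded_kwd, reinstalling = match.group( 1 ), match.group( 2 ), match.group( 3 )

    def wait_until( get_status, final_states, poll_seconds=1, max_tries=300 ):
        # Generic form of wait_for_repository_installation(): re-check the
        # status until it reaches a terminal state, giving up after max_tries.
        for _ in range( max_tries ):
            status = get_status()
            if status in final_states:
                return status
            time.sleep( poll_seconds )
        raise AssertionError( 'Timed out waiting for a final installation state' )

    statuses = iter( [ 'Cloning', 'Installing tool dependencies', 'Installed' ] )
    assert wait_until( lambda: next( statuses ), [ 'Error', 'Installed' ], poll_seconds=0 ) == 'Installed'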
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-datatypes_repository_name = 'emboss_datatypes'
+datatypes_repository_name = 'emboss_datatypes_0030'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
@@ -29,7 +29,7 @@
"""Create a category for this test suite"""
self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
def test_0010_create_repositories( self ):
- '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
+ '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes_0030, and emboss_0030 repositories and populate the emboss_datatypes_0030 repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_0050_circular_n_levels.py
--- a/test/tool_shed/functional/test_0050_circular_n_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -79,8 +79,8 @@
repository_long_description=filtering_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
- self.upload_file( repository,
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( filtering_repository,
'filtering/filtering_1.1.0.tar',
strings_displayed=[],
commit_message='Uploaded filtering.tar.' )
@@ -89,7 +89,7 @@
self.generate_repository_dependency_xml( [ emboss_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Filtering depends on the emboss repository.' )
- self.upload_file( repository,
+ self.upload_file( filtering_repository,
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency on emboss.' )
@@ -109,6 +109,13 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+ self.generate_repository_dependency_xml( [ filtering_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Emboss depends on the filtering repository.' )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering.' )
previous_tip = self.get_repository_tip( repository )
self.generate_repository_dependency_xml( [ emboss_datatypes_repository, emboss_repository, filtering_repository, repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -119,11 +126,24 @@
commit_message='Uploaded dependency on filtering.' )
self.display_manage_repository_page( repository, strings_not_displayed=[ previous_tip ] )
def test_0030_verify_repository_dependencies( self ):
- '''Verify that the generated dependency circle does not cause an infinite loop.'''
+ '''Verify that the generated dependency circle does not cause an infinite loop.
+
+ Expected structure:
+
+ id: 2 key: http://localhost:8634__ESEP__freebayes_0050__ESEP__user1__ESEP__2e73d8e1b59d
+ ['http://localhost:8634', 'emboss_datatypes_0050', 'user1', '596029c334b1']
+ ['http://localhost:8634', 'emboss_0050', 'user1', '9f1503046640']
+ id: 3 key: http://localhost:8634__ESEP__filtering_0050__ESEP__user1__ESEP__eefdd8bc0db9
+ ['http://localhost:8634', 'emboss_0050', 'user1', '9f1503046640']
+ id: 4 key: http://localhost:8634__ESEP__emboss_0050__ESEP__user1__ESEP__9f1503046640
+ ['http://localhost:8634', 'emboss_datatypes_0050', 'user1', '596029c334b1']
+ '''
emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
+ for changeset_revision in self.get_repository_metadata_revisions( emboss_repository ):
+ self.check_repository_dependency( freebayes_repository, emboss_repository, changeset_revision )
self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
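
The expected-structure docstring above shows how repository dependency keys are encoded: a single string joining tool shed URL, repository name, owner, and changeset revision with the '__ESEP__' separator. Decoding one is a single split; the key below is copied from that docstring:

    key = 'http://localhost:8634__ESEP__freebayes_0050__ESEP__user1__ESEP__2e73d8e1b59d'
    toolshed_base_url, name, owner, changeset_revision = key.split( '__ESEP__' )
    assert ( name, owner, changeset_revision ) == ( 'freebayes_0050', 'user1', '2e73d8e1b59d' )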
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_1000_install_filtering_repository.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1000_install_filtering_repository.py
@@ -0,0 +1,31 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_galaxy_user, get_galaxy_private_role, get_category_by_name
+
+class BasicToolShedFeatures( ShedTwillTestCase ):
+ '''Test installing a basic repository.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_galaxy_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_galaxy_private_role( test_user_1 )
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_sheds( self ):
+ """Browse the available tool sheds in this Galaxy instance."""
+ self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
+ self.check_page_for_string( 'Embedded tool shed for functional tests' )
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
+ def test_0010_browse_test_0000_category( self ):
+ '''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
+ category = get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+ self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
+ def test_0015_preview_filtering_repository( self ):
+ '''Load the preview page for the filtering_0000 repository in the tool shed.'''
+ self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
+ def test_0020_install_filtering_repository( self ):
+ self.install_repository( 'filtering_0000', common.test_user_1_name )
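
A side note on the numbered method names (test_0000_*, test_0005_*, ...): unittest's loader, which nose builds on, sorts test methods alphabetically, so the zero-padded prefixes are what guarantee the setup, browse, preview, install order. A small illustration of that sorting, assuming the default loader configuration:

    import unittest

    class OrderedExample( unittest.TestCase ):
        # The zero-padded prefixes sort lexicographically in execution order.
        def test_0000_first( self ):
            self.assertTrue( True )
        def test_0005_second( self ):
            self.assertTrue( True )

    names = unittest.TestLoader().getTestCaseNames( OrderedExample )
    assert names == [ 'test_0000_first', 'test_0005_second' ]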
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import os, sys, shutil, tempfile, re
+import os, sys, shutil, tempfile, re, string
# Assume we are run from the galaxy root directory, add lib to the python path
cwd = os.getcwd()
@@ -33,9 +33,14 @@
import sys, threading, random
import httplib, socket
from paste import httpserver
+# This is for the tool shed application.
import galaxy.webapps.community.app
-from galaxy.webapps.community.app import UniverseApplication
-from galaxy.webapps.community import buildapp
+from galaxy.webapps.community.app import UniverseApplication as ToolshedUniverseApplication
+from galaxy.webapps.community import buildapp as toolshedbuildapp
+# This is for the galaxy application.
+import galaxy.app
+from galaxy.app import UniverseApplication as GalaxyUniverseApplication
+from galaxy.web import buildapp as galaxybuildapp
import nose.core
import nose.config
@@ -46,8 +51,22 @@
default_tool_shed_test_host = "localhost"
default_tool_shed_test_port_min = 8000
-default_tool_shed_test_port_max = 9999
+default_tool_shed_test_port_max = 8999
default_tool_shed_locales = 'en'
+default_galaxy_test_port_min = 9000
+default_galaxy_test_port_max = 9999
+default_galaxy_test_host = 'localhost'
+
+tool_sheds_conf_xml_template = '''<?xml version="1.0"?>
+<tool_sheds>
+ <tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
+</tool_sheds>
+'''
+
+shed_tool_conf_xml_template = '''<?xml version="1.0"?>
+<toolbox tool_path="${shed_tool_path}">
+</toolbox>
+'''
def run_tests( test_config ):
loader = nose.loader.TestLoader( config=test_config )
@@ -67,6 +86,8 @@
# ---- Configuration ------------------------------------------------------
tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
+ galaxy_test_host = os.environ.get( 'GALAXY_TEST_HOST', default_galaxy_test_host )
+ galaxy_test_port = os.environ.get( 'GALAXY_TEST_PORT', None )
tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
@@ -85,18 +106,33 @@
if not os.path.isdir( tool_shed_test_tmp_dir ):
os.mkdir( tool_shed_test_tmp_dir )
tool_shed_test_proxy_port = None
+ galaxy_test_proxy_port = None
if 'TOOL_SHED_TEST_DBPATH' in os.environ:
- db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+ shed_db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
else:
tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- db_path = os.path.join( tempdir, 'database' )
- file_path = os.path.join( db_path, 'files' )
+ shed_db_path = os.path.join( tempdir, 'database' )
+ galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_tool_conf.xml' ) )
+ galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOLS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
+ if 'GALAXY_TEST_DBPATH' in os.environ:
+ galaxy_db_path = os.environ[ 'GALAXY_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_db_path = os.path.join( tempdir, 'database' )
+ shed_file_path = os.path.join( shed_db_path, 'files' )
+ galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_shed_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
if 'TOOL_SHED_TEST_DBURI' in os.environ:
- database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+ toolshed_database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
else:
- database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+ toolshed_database_connection = 'sqlite:///' + os.path.join( shed_db_path, 'community_test.sqlite' )
+ if 'GALAXY_TEST_DBURI' in os.environ:
+ galaxy_database_connection = os.environ[ 'GALAXY_TEST_DBURI' ]
+ else:
+ galaxy_database_connection = 'sqlite:///' + os.path.join( galaxy_db_path, 'universe_test.sqlite' )
kwargs = {}
for dir in [ tool_shed_test_tmp_dir ]:
try:
@@ -104,30 +140,31 @@
except OSError:
pass
- print "Database connection:", database_connection
+ print "Tool shed database connection:", toolshed_database_connection
+ print "Galaxy database connection:", galaxy_database_connection
hgweb_config_dir = hgweb_config_file_path
os.environ[ 'TEST_HG_WEB_CONFIG_DIR' ] = hgweb_config_dir
print "Directory location for hgweb.config:", hgweb_config_dir
- # ---- Build Application --------------------------------------------------
- app = None
+ # ---- Build Tool Shed Application --------------------------------------------------
+ toolshedapp = None
global_conf = { '__file__' : 'community_wsgi.ini.sample' }
- if not database_connection.startswith( 'sqlite://' ):
- kwargs[ 'database_engine_option_max_overflow' ] = '20'
+# if not toolshed_database_connection.startswith( 'sqlite://' ):
+# kwargs[ 'database_engine_option_max_overflow' ] = '20'
if tool_dependency_dir is not None:
kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
if use_distributed_object_store:
kwargs[ 'object_store' ] = 'distributed'
kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
- app = UniverseApplication( job_queue_workers = 5,
+ toolshedapp = ToolshedUniverseApplication( job_queue_workers = 5,
id_secret = 'changethisinproductiontoo',
template_path = 'templates',
- database_connection = database_connection,
+ database_connection = toolshed_database_connection,
database_engine_option_pool_size = '10',
- file_path = file_path,
+ file_path = shed_file_path,
new_file_path = new_repos_path,
tool_path=tool_path,
datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
@@ -144,23 +181,23 @@
hgweb_config_dir = hgweb_config_dir,
**kwargs )
- log.info( "Embedded Universe application started" )
+ log.info( "Embedded Toolshed application started" )
- # ---- Run webserver ------------------------------------------------------
- server = None
- webapp = buildapp.app_factory( dict( database_file=database_connection ),
- use_translogger=False,
- static_enabled=False,
- app=app )
+ # ---- Run tool shed webserver ------------------------------------------------------
+ tool_shed_server = None
+ toolshedwebapp = toolshedbuildapp.app_factory( dict( database_file=toolshed_database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=toolshedapp )
if tool_shed_test_port is not None:
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ tool_shed_server = httpserver.serve( toolshedwebapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
else:
random.seed()
for i in range( 0, 9 ):
try:
tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ tool_shed_server = httpserver.serve( toolshedwebapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
break
except socket.error, e:
if e[0] == 98:
@@ -172,7 +209,7 @@
os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
else:
os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
- t = threading.Thread( target=server.serve_forever )
+ t = threading.Thread( target=tool_shed_server.serve_forever )
t.start()
# Test if the server is up
for i in range( 10 ):
@@ -185,6 +222,90 @@
else:
raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
log.info( "Embedded web server started" )
+
+ # ---- Optionally start up a Galaxy instance ------------------------------------------------------
+ galaxyapp = None
+ galaxy_server = None
+ if 'TEST_TOOL_SHED_START_GALAXY' in os.environ:
+ # Generate the shed_tool_conf.xml and tool_sheds_conf.xml files
+ tool_sheds_conf_template_parser = string.Template( tool_sheds_conf_xml_template )
+ tool_sheds_conf_xml = tool_sheds_conf_template_parser.safe_substitute( shed_url=tool_shed_test_host, shed_port=tool_shed_test_port )
+ file( galaxy_tool_sheds_conf_file, 'w' ).write( tool_sheds_conf_xml )
+ shed_tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
+ shed_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
+ file( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+
+ # ---- Build Galaxy Application --------------------------------------------------
+ galaxy_global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
+ if not galaxy_database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ galaxyapp = GalaxyUniverseApplication( job_queue_workers = 5,
+ id_secret = 'changethisinproductiontoo',
+ template_path = "templates",
+ database_connection = galaxy_database_connection,
+ database_engine_option_pool_size = '10',
+ file_path = galaxy_file_path,
+ tool_path = tool_path,
+ tool_dependency_dir=galaxy_tool_dependency_dir,
+ shed_tool_path=galaxy_shed_tool_path,
+ update_integrated_tool_panel = False,
+ tool_config_file = galaxy_shed_tool_conf_file,
+ tool_sheds_config_file = galaxy_tool_sheds_conf_file,
+ datatype_converters_config_file = "datatype_converters_conf.xml.sample",
+ tool_parse_help = False,
+ tool_data_table_config_path = tool_data_table_config_path,
+ shed_tool_data_table_config = shed_tool_data_table_config,
+ log_destination = "stdout",
+ use_heartbeat = False,
+ allow_user_creation = True,
+ allow_user_deletion = True,
+ admin_users = 'test@bx.psu.edu',
+ allow_library_path_paste = True,
+ global_conf = global_conf,
+ running_functional_tests=True,
+ **kwargs )
+
+ log.info( "Embedded Galaxy application started" )
+
+ # ---- Run galaxy webserver ------------------------------------------------------
+ galaxy_server = None
+ galaxywebapp = galaxybuildapp.app_factory( dict( database_file=galaxy_database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=galaxyapp )
+
+ if galaxy_test_port is not None:
+ galaxy_server = httpserver.serve( galaxywebapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ galaxy_test_port = str( random.randint( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % galaxy_test_port )
+ galaxy_server = httpserver.serve( galaxywebapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
+ else:
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % \
+ ( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ if galaxy_test_proxy_port:
+ os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_proxy_port
+ else:
+ os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_port
+ t = threading.Thread( target=galaxy_server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( galaxy_test_host, galaxy_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ log.info( "Embedded galaxy web server started" )
# We don't add the tests to the path until everything is up and running
new_path = [ os.path.join( cwd, 'test' ) ]
new_path.extend( sys.path[1:] )
@@ -194,10 +315,15 @@
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_proxy_port ) )
else:
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
+ if galaxy_test_proxy_port:
+ log.info( "Galaxy tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_proxy_port ) )
+ else:
+ log.info( "Galaxy tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
success = False
try:
# Pass in through script set env, will leave a copy of ALL test validate files.
os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
+ os.environ[ 'GALAXY_TEST_HOST' ] = galaxy_test_host
if tool_shed_test_file_dir:
os.environ[ 'TOOL_SHED_TEST_FILE_DIR' ] = tool_shed_test_file_dir
test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
@@ -210,16 +336,26 @@
log.info( "Shutting down" )
# ---- Tear down -----------------------------------------------------------
- if server:
+ if tool_shed_server:
log.info( "Shutting down embedded web server" )
- server.server_close()
- server = None
+ tool_shed_server.server_close()
+ tool_shed_server = None
log.info( "Embedded web server stopped" )
- if app:
- log.info( "Shutting down app" )
- app.shutdown()
- app = None
- log.info( "Embedded Universe application stopped" )
+ if toolshedapp:
+ log.info( "Shutting down tool shed app" )
+ toolshedapp.shutdown()
+ toolshedapp = None
+ log.info( "Embedded tool shed application stopped" )
+ if galaxy_server:
+ log.info( "Shutting down galaxy web server" )
+ galaxy_server.server_close()
+ galaxy_server = None
+ log.info( "Embedded galaxy server stopped" )
+ if galaxyapp:
+ log.info( "Shutting down galaxy app" )
+ galaxyapp.shutdown()
+ galaxyapp = None
+ log.info( "Embedded galaxy application stopped" )
if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
try:
for dir in [ tool_shed_test_tmp_dir ]:
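
The embedded-Galaxy setup above generates its XML configuration files from string.Template blocks (tool_sheds_conf_xml_template and shed_tool_conf_xml_template) via safe_substitute(). A standalone sketch of that rendering step; the host and port values are examples:

    import string

    tool_sheds_conf_xml_template = '''<?xml version="1.0"?>
    <tool_sheds>
        <tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
    </tool_sheds>
    '''

    template = string.Template( tool_sheds_conf_xml_template )
    # safe_substitute() leaves any unknown ${placeholder} intact instead of
    # raising KeyError, so templates can be rendered incrementally.
    tool_sheds_conf_xml = template.safe_substitute( shed_url='localhost', shed_port='8634' )
    assert 'http://localhost:8634/' in tool_sheds_conf_xml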
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for rendering the repository dependencies container.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dad76985fc57/
changeset: dad76985fc57
user: greg
date: 2012-12-12 20:58:13
summary: Fix for rendering the repository dependencies container.
affected #: 1 file
diff -r 3301ed8fea42b1067f88138573b7d716c3b631e5 -r dad76985fc571feed65cc63de37b2685312348e3 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -214,6 +214,7 @@
del repository_dependencies[ 'description' ]
repository_dependencies_folder, folder_id, repository_dependency_id = \
populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
+ repository_dependencies_folder = prune_repository_dependencies( repository_dependencies_folder )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
@@ -346,6 +347,10 @@
workflows_root_folder = None
return folder_id, workflows_root_folder
def cast_empty_repository_dependency_folders( folder, repository_dependency_id ):
+ """
+ Change any empty folders contained within the repository dependencies container into a repository dependency, since an empty folder has no repository
+ dependencies of its own. This method is not currently used (and may not be needed), but it is kept here just in case.
+ """
if not folder.folders and not folder.repository_dependencies:
repository_dependency_id += 1
repository_dependency = folder.to_repository_dependency( repository_dependency_id )
@@ -439,4 +444,20 @@
print ' %s%s' % ( pad_str, repository_dependency.listify )
for sub_folder in folder.folders:
print_folders( pad+5, sub_folder )
+def prune_repository_dependencies( folder ):
+ """
+ Since the object used to generate a repository dependencies container is a dictionary and not an odict() (it must be JSON-serializable), the
+ order in which the dictionary is processed to create the container sometimes results in repository dependency entries in a folder that also
+ includes the repository dependency as a sub-folder (if the repository dependency has its own repository dependency). This method removes
+ all repository dependencies from the folder that are also sub-folders of that folder.
+ """
+ repository_dependencies = [ rd for rd in folder.repository_dependencies ]
+ for repository_dependency in repository_dependencies:
+ listified_repository_dependency = repository_dependency.listify
+ if is_subfolder_of( folder, listified_repository_dependency ):
+ repository_dependencies.remove( repository_dependency )
+ folder.repository_dependencies = repository_dependencies
+ for sub_folder in folder.folders:
+ return prune_repository_dependencies( sub_folder )
+ return folder
\ No newline at end of file
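
One caution on prune_repository_dependencies() as committed: the return inside the sub-folder loop means only the first sub-folder is ever descended into. A sketch of the walk with the recursion applied to every sub-folder, using simplified stand-ins for Folder and is_subfolder_of() (the real versions live in container_util and compare listified dependencies):

    class Folder( object ):
        # Simplified stand-in for container_util's Folder.
        def __init__( self, folders=None, repository_dependencies=None ):
            self.folders = folders or []
            self.repository_dependencies = repository_dependencies or []

    def is_subfolder_of( folder, repository_dependency ):
        # Stand-in: a dependency is redundant if a sub-folder already carries it.
        return any( repository_dependency in sub_folder.repository_dependencies
                    for sub_folder in folder.folders )

    def prune_repository_dependencies( folder ):
        # Drop dependencies that are duplicated by a sub-folder, then recurse
        # into every sub-folder rather than returning from the first one.
        folder.repository_dependencies = [ rd for rd in folder.repository_dependencies
                                           if not is_subfolder_of( folder, rd ) ]
        for sub_folder in folder.folders:
            prune_repository_dependencies( sub_folder )
        return folder

    root = Folder( folders=[ Folder( repository_dependencies=[ 'emboss_0050' ] ) ],
                   repository_dependencies=[ 'emboss_0050', 'filtering_0050' ] )
    assert prune_repository_dependencies( root ).repository_dependencies == [ 'filtering_0050' ]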
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fixes for installing tool shed repositories.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3301ed8fea42/
changeset: 3301ed8fea42
user: greg
date: 2012-12-12 20:35:12
summary: Fixes for installing tool shed repositories.
affected #: 8 files
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -636,9 +636,6 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
return tool_path, relative_install_dir
return None, None
-def get_tool_shed_from_clone_url( repository_clone_url ):
- tmp_url = clean_repository_clone_url( repository_clone_url )
- return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
@@ -693,18 +690,6 @@
changeset_revision = None
ctx_rev = None
return changeset_revision, ctx_rev
-def get_url_from_repository_tool_shed( app, repository ):
- """
- The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
- something like: http://toolshed.g2.bx.psu.edu/.
- """
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( repository.tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
- return None
def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,4 +1,4 @@
-import os, shutil, tempfile, logging, string, threading
+import os, shutil, tempfile, logging, string, threading, urllib2
from galaxy import util
from galaxy.tools import parameters
from galaxy.util import inflector
@@ -91,12 +91,12 @@
folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
if repository_dependencies:
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
- repository_name=repository_name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
if tool_dependencies:
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
@@ -526,42 +526,44 @@
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
return repository_metadata
-def create_repo_info_dict( trans, repo, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
- repository=None, repository_metadata=None ):
+def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
+ repository_metadata=None, metadata=None, repository_dependencies=None ):
"""
- Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also contain
- the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies. This method is called during the
- tool shed repository installation process from Galaxy. In this case both the received repository and repository_metadata will be objects, but repository_name
- sill be None. This method is also called when a tool shed repository that was uninstalled from a Galaxy instance is being re-installed. In this case, both
- repository and repository_metadata will be None, but repository_name will have a value.
+ Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
+ contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
+
+ This method is called from Galaxy from two places:
+ 1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
+ repository and repository_metadata will be objects.
+ 2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
+ repository_metadata will be None, but metadata will be the tool_shed_repository metadata on the Galaxy side, and the repository_dependencies will
+ be an object previously retrieved from the tool shed.
"""
repo_info_dict = {}
- if repository is None and repository_metadata is None:
- # The repository associated with the received repository_clone_url is being re-installed into a Galaxy instance, so we need to retrieve the
- # appropriate repository from the tool shed using the received information.
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- # Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
- # Cast unicode to string.
- repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
- str( repository_clone_url ),
- str( changeset_revision ),
- str( ctx_rev ),
- str( repository_owner ),
- repository_dependencies,
- metadata.get( 'tool_dependencies', None ) )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ # Get a dictionary of all repositories upon which the contents of the received repository depends.
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
+ # Cast unicode to string.
+ repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
+ str( repository_clone_url ),
+ str( changeset_revision ),
+ str( ctx_rev ),
+ str( repository_owner ),
+ repository_dependencies,
+ metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
@@ -1189,12 +1191,18 @@
return valid_filenames
def get_repository_by_name_and_owner( trans, name, owner ):
"""Get a repository from the database via name and owner"""
+ if trans.webapp.name == 'galaxy':
+ return trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
+ trans.model.ToolShedRepository.table.c.owner == owner ) ) \
+ .first()
+ # We're in the tool shed.
user = get_user_by_username( trans, owner )
return trans.sa_session.query( trans.model.Repository ) \
.filter( and_( trans.model.Repository.table.c.name == name,
trans.model.Repository.table.c.user_id == user.id ) ) \
.first()
-def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url,
+def get_repository_dependencies_for_changeset_revision( trans, repository, repository_metadata, toolshed_base_url,
key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
handled_key_rd_dicts=None, circular_repository_dependencies=None ):
"""
@@ -1291,7 +1299,7 @@
"""Get a repository on the tool shed side from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
- """Get metadata for a specified repository change set from the database"""
+ """Get metadata for a specified repository change set from the database."""
# Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
# created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
# records are removed.
@@ -1357,6 +1365,9 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_tool_shed_from_clone_url( repository_clone_url ):
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
updated_key_rd_dicts = []
for key_rd_dict in key_rd_dicts:
@@ -1387,6 +1398,18 @@
# We have the updated changeset revision.
updated_key_rd_dicts.append( new_key_rd_dict )
return updated_key_rd_dicts
+def get_url_from_repository_tool_shed( app, repository ):
+ """
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
+ something like: http://toolshed.g2.bx.psu.edu/.
+ """
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( repository.tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
+ return None
def get_user_by_username( trans, username ):
"""Get a user from the database by username"""
return trans.sa_session.query( trans.model.User ) \
@@ -1412,7 +1435,7 @@
new_key_rd_dict[ current_repository_key ] = rd_copy
current_repository_key_rd_dicts.append( new_key_rd_dict )
if current_repository_key_rd_dicts:
- toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
handle_key_rd_dicts_for_repository( trans,
current_repository_key,
current_repository_key_rd_dicts,
@@ -1420,7 +1443,6 @@
handled_key_rd_dicts,
circular_repository_dependencies )
return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
repository=required_repository,
repository_metadata=required_repository_metadata,
toolshed_base_url=toolshed,
@@ -1456,8 +1478,6 @@
trans.security.encode_id( required_repository.id ),
changeset_revision )
if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
# The required_repository_metadata changeset_revision is installable.
required_metadata = required_repository_metadata.metadata
if required_metadata:
@@ -1475,12 +1495,12 @@
error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
log.debug( error_message )
- return toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
+ return toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
def handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ):
next_repository_key_rd_dict = key_rd_dicts_to_be_processed.pop( 0 )
next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
- toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
handle_key_rd_dicts_for_repository( trans,
next_repository_key,
next_repository_key_rd_dicts,
@@ -1488,7 +1508,6 @@
handled_key_rd_dicts,
circular_repository_dependencies )
return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
repository=required_repository,
repository_metadata=required_repository_metadata,
toolshed_base_url=toolshed,
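For orientation, the two URL helpers added above are near-inverses: get_tool_shed_from_clone_url reduces a repository clone URL to the bare shed host, while get_url_from_repository_tool_shed maps that stored host back to a configured shed URL. A minimal self-contained sketch of the first derivation, assuming clean_repository_clone_url strips the protocol and any credentials (the URL below is illustrative):

    import urlparse

    def derive_tool_shed( repository_clone_url ):
        # Assumed equivalent of clean_repository_clone_url: drop the protocol and
        # any user credentials, leaving e.g. toolshed.g2.bx.psu.edu/repos/owner/name.
        parts = urlparse.urlparse( repository_clone_url )
        tmp_url = parts.netloc.split( '@' )[ -1 ] + parts.path
        # Everything before the 'repos' path component is the tool shed itself.
        return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )

    print derive_tool_shed( 'http://toolshed.g2.bx.psu.edu/repos/devteam/emboss_5' )
    # prints: toolshed.g2.bx.psu.edu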
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1243,6 +1243,58 @@
message=message,
status=status )
@web.expose
+ def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
+ """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # Default to the received changeset revision and ctx_rev.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx_rev = str( update_to_ctx.rev() )
+ latest_changeset_revision = changeset_revision
+ update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
+ if changeset_revision == repository.tip( trans.app ):
+ # If changeset_revision is the repository tip, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
+ # repository was installed. We need to find the changeset_revision to which we need to update.
+ update_to_changeset_hash = None
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ ctx = get_changectx_for_changeset( repo, changeset_hash )
+ if update_to_changeset_hash:
+ if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ # We found a RepositoryMetadata record.
+ if changeset_hash == repository.tip( trans.app ):
+ # The current ctx is the repository tip, so use it.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ latest_changeset_revision = changeset_hash
+ else:
+ update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ latest_changeset_revision = update_to_changeset_hash
+ break
+ elif not update_to_changeset_hash and changeset_hash == changeset_revision:
+ # We've found the changeset in the changelog for which we need to get the next update.
+ update_to_changeset_hash = changeset_hash
+ update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
+ update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
+ return tool_shed_encode( update_dict )
+ @web.expose
def get_ctx_rev( self, trans, **kwd ):
"""Given a repository and changeset_revision, return the correct ctx.rev() value."""
repository_name = kwd[ 'name' ]
@@ -1274,6 +1326,30 @@
return repository_metadata.metadata
return None
@web.json
+ def get_repository_dependencies( self, trans, **kwd ):
+ params = util.Params( kwd )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_id = trans.security.encode_id( repository.id )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ # Get a dictionary of all repositories upon which the contents of the received repository depends.
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
+ if repository_dependencies:
+ return tool_shed_encode( repository_dependencies )
+ return ''
+ @web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
"""
Generate a list of dictionaries, each of which contains the information about a repository that will be necessary for installing
@@ -1299,13 +1375,13 @@
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
changeset_revision=changeset_revision,
ctx_rev=str( ctx.rev() ),
repository_owner=repository.user.username,
- repository_name=None,
+ repository_name=repository.name,
repository=repository,
+ metadata=None,
repository_metadata=repository_metadata )
repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
@@ -1370,58 +1446,6 @@
if tool_version_dicts:
return to_json_string( tool_version_dicts )
return ''
- @web.expose
- def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
- """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- galaxy_url = kwd.get( 'galaxy_url', '' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- # Default to the received changeset revision and ctx_rev.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
- ctx_rev = str( update_to_ctx.rev() )
- latest_changeset_revision = changeset_revision
- update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
- if changeset_revision == repository.tip( trans.app ):
- # If changeset_revision is the repository tip, there are no additional updates.
- return tool_shed_encode( update_dict )
- else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
- return tool_shed_encode( update_dict )
- else:
- # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was installed. We need to find the changeset_revision to which we need to update.
- update_to_changeset_hash = None
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
- if update_to_changeset_hash:
- if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
- # We found a RepositoryMetadata record.
- if changeset_hash == repository.tip( trans.app ):
- # The current ctx is the repository tip, so use it.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
- latest_changeset_revision = changeset_hash
- else:
- update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
- latest_changeset_revision = update_to_changeset_hash
- break
- elif not update_to_changeset_hash and changeset_hash == changeset_revision:
- # We've found the changeset in the changelog for which we need to get the next update.
- update_to_changeset_hash = changeset_hash
- update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
- update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
- return tool_shed_encode( update_dict )
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
@@ -1783,7 +1807,6 @@
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
@@ -1896,7 +1919,6 @@
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
@@ -2418,7 +2440,6 @@
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
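The relocated get_changeset_revision_and_ctx_rev method above is consumed over plain HTTP by Galaxy instances checking for updates. A hedged sketch of that round trip, using tool_shed_decode from galaxy.tool_shed.encoding_util (the shed URL, name, owner and revision values are illustrative):

    import urllib2
    from galaxy.tool_shed.encoding_util import tool_shed_decode

    url = '%s/repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
        ( 'http://toolshed.g2.bx.psu.edu', 'emboss_5', 'devteam', '7621d0d6c6ef' )
    response = urllib2.urlopen( url )
    encoded_update_dict = response.read()
    response.close()
    # The method returns a tool_shed_encode()d dictionary carrying the changeset
    # revision and ctx_rev to which the installed repository can be updated.
    update_dict = tool_shed_decode( encoded_update_dict )
    latest_changeset_revision = update_dict[ 'changeset_revision' ]
    ctx_rev = update_dict[ 'ctx_rev' ]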
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -561,6 +561,28 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
return get_repository_file_contents( file_path )
+ @web.expose
+ @web.require_admin
+ def get_repository_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined for the received repository
+ name, owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We
+ need it so that we can derive the tool shed from which it was installed.
+ """
+ repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = url_join( tool_shed_url,
+ 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ if len( raw_text ) > 2:
+ text = json.from_json_string( tool_shed_decode( raw_text ) )
+ log.debug( text )
+ else:
+ text = ''
+ return text
def get_versions_of_tool( self, app, guid ):
tool_version = get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -1348,17 +1370,24 @@
repo_info_dict = kwd.get( 'repo_info_dict', None )
# The repo_info_dict should be encoded.
if not repo_info_dict:
- # This should only happen if the tool_shed_repository does not include any valid tools.
+ # Entering this if block used to happen only when the tool_shed_repository included no valid tools. Since repository dependencies
+ # were introduced it may never happen, but we'll keep the block just in case.
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision )
repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.installed_changeset_revision,
ctx_rev=ctx_rev,
repository_owner=tool_shed_repository.owner,
repository_name=tool_shed_repository.name,
repository=None,
- repository_metadata=None )
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
repo_info_dict = tool_shed_encode( repo_info_dict )
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
@@ -1434,21 +1463,27 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- metadata = repository.metadata
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
+ repository_id = kwd[ 'id' ]
+ tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ metadata = tool_shed_repository.metadata
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
- changeset_revision=repository.installed_changeset_revision,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
ctx_rev=ctx_rev,
- repository_owner=repository.owner,
- repository_name=repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
repository=None,
- repository_metadata=None )
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1469,18 +1504,18 @@
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
- % ( repository.name, original_section_name )
+ % ( tool_shed_repository.name, original_section_name )
message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
message += "different section in the tool panel."
status = 'warning'
else:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % repository.name
+ message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
status = 'warning'
if metadata and 'readme_files' in metadata:
url = url_join( tool_shed_url,
'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository.name, repository.owner, repository.installed_changeset_revision ) )
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1508,7 +1543,7 @@
install_tool_dependencies_check_box_checked = True
install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=install_tool_dependencies_check_box_checked )
return trans.fill_template( '/admin/tool_shed_repository/reselect_tool_panel_section.mako',
- repository=repository,
+ repository=tool_shed_repository,
no_changes_check_box=no_changes_check_box,
original_section_name=original_section_name,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
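The new Galaxy-side get_repository_dependencies action above guards with len( raw_text ) > 2 because the shed returns an essentially empty payload when no dependencies are defined. For reference, a hypothetical example of the decoded dictionary, assuming STRSEP is the key separator defined in container_util (all names and revisions are illustrative):

    from galaxy.webapps.community.util.container_util import STRSEP

    key = STRSEP.join( [ 'http://toolshed.g2.bx.psu.edu', 'emboss_5', 'devteam', '7621d0d6c6ef' ] )
    repository_dependencies = {
        'description' : 'Repositories required by the emboss_5 tools',
        # Each value lists ( toolshed, name, owner, changeset_revision ) entries.
        key : [ [ 'http://toolshed.g2.bx.psu.edu', 'emboss_datatypes', 'devteam', 'a89163f31369' ] ],
    }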
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -159,7 +159,7 @@
def __str__( self ):
return str( self.count )
- readme_files_root_folder = containers_dict[ 'readme_files' ]
+ readme_files_root_folder = containers_dict.get( 'readme_files', None )
%>
%if readme_files_root_folder:
<p/>
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -11,7 +11,7 @@
<div class="toolFormBody"><form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ), repo_info_dict=encoded_repo_info_dict )}" method="post" ><div style="clear: both"></div>
- <% readme_files_dict = containers_dict[ 'readme_files' ] %>
+ <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
%if readme_files_dict:
<div class="form-row"><table class="colored" width="100%">
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -37,7 +37,7 @@
<div class="toolFormBody"><form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='prepare_for_install', tool_shed_url=tool_shed_url, encoded_repo_info_dicts=encoded_repo_info_dicts, includes_tools=includes_tools, includes_tool_dependencies=includes_tool_dependencies )}" method="post" ><div style="clear: both"></div>
- <% readme_files_dict = containers_dict[ 'readme_files' ] %>
+ <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
%if readme_files_dict:
<div class="form-row"><table class="colored" width="100%">
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -467,13 +467,13 @@
has_readme_files = metadata and 'readme_files' in metadata
has_workflows = metadata and 'workflows' in metadata
- datatypes_root_folder = containers_dict[ 'datatypes' ]
- invalid_tools_root_folder = containers_dict[ 'invalid_tools' ]
- readme_files_root_folder = containers_dict[ 'readme_files' ]
- repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
- tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
- valid_tools_root_folder = containers_dict[ 'valid_tools' ]
- workflows_root_folder = containers_dict[ 'workflows' ]
+ datatypes_root_folder = containers_dict.get( 'datatypes', None )
+ invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
+ readme_files_root_folder = containers_dict.get( 'readme_files', None )
+ repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+ valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
+ workflows_root_folder = containers_dict.get( 'workflows', None )
has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
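The template changes above (reselect_tool_panel_section.mako, select_tool_panel_section.mako and both common.mako templates) swap direct indexing for dict.get so that a containers_dict built without a given section simply renders nothing instead of raising a KeyError. The same defensive pattern in isolation:

    containers_dict = {}  # e.g. built for a repository that defines no readme files
    readme_files_root_folder = containers_dict.get( 'readme_files', None )  # None rather than a KeyError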
commit/galaxy-central: dan: Fix for displaying old-style external display applications. Fixes e.g. display at UCSC for BED in client-side history items.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c8c181a90467/
changeset: c8c181a90467
user: dan
date: 2012-12-12 20:15:25
summary: Fix for displaying old-style external display applications. Fixes e.g. display at UCSC for BED in client-side history items.
affected #: 1 file
diff -r bfbf96a1435087fe4aaea6c0e16c072777ccdabd -r c8c181a904677adf0c9c65a1151848d9d2da16fb lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -196,6 +196,7 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_apps' ] = get_display_apps( trans, hda )
+ hda_dict[ 'display_types' ] = get_display_types( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
hda_dict[ 'peek' ] = to_unicode( hda.display_peek() )
@@ -226,3 +227,22 @@
display_apps.append( dict( label=display_app.name, links=app_links ) )
return display_apps
+
+def get_display_types( trans, hda ):
+ #TODO: make more straightforward (somehow)
+ #FIXME: need to force a transition to all new-style display applications
+ display_apps = []
+
+ for display_app in hda.datatype.get_display_types():
+ app_links = []
+ target_frame, display_links = hda.datatype.get_display_links( hda, display_app, trans.app, trans.request.base )
+ for display_name, display_link in display_links:
+ app_links.append({
+ 'target' : target_frame,
+ 'href' : display_link,
+ 'text' : display_name
+ })
+ if app_links:
+ display_apps.append( dict( label=hda.datatype.get_display_label( display_app ), links=app_links ) )
+
+ return display_apps
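Each entry get_display_types builds mirrors the shape produced by get_display_apps: a label plus a list of link dictionaries, exposed through hda_dict[ 'display_types' ]. A hypothetical entry as it might appear in the API response (URL and names are illustrative):

    display_types = [
        { 'label' : 'display at UCSC',
          'links' : [ { 'target' : '_blank',
                        'href'   : 'http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg17&position=...',
                        'text'   : 'main' } ] },
    ]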
commit/galaxy-central: greg: Fixes for generating the repository dependencies container.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bfbf96a14350/
changeset: bfbf96a14350
user: greg
date: 2012-12-12 16:36:12
summary: Fixes for generating the repository dependencies container.
affected #: 1 file
diff -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 -r bfbf96a1435087fe4aaea6c0e16c072777ccdabd lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -212,39 +212,8 @@
repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
- # The current keys in repository_dependencies should all be folders.
- folder_keys = repository_dependencies.keys()
- # If repository_dependencies_folder_key is an entry in repository_dependencies, process it first.
- if repository_dependencies_folder_key in repository_dependencies:
- val = repository_dependencies[ repository_dependencies_folder_key ]
- repository_dependencies_folder, folder_id, repository_dependency_id = handle_repository_dependencies_entry( repository_dependencies_root_folder,
- repository_dependencies_folder,
- repository_dependencies_folder_key,
- folder_keys,
- folder_id,
- repository_dependency_id,
- repository_name,
- repository_owner,
- changeset_revision,
- repository_dependencies_folder_key,
- val )
- del repository_dependencies[ repository_dependencies_folder_key ]
- for key, val in repository_dependencies.items():
- repository_dependencies_folder, folder_id, repository_dependency_id = handle_repository_dependencies_entry( repository_dependencies_root_folder,
- repository_dependencies_folder,
- repository_dependencies_folder_key,
- folder_keys,
- folder_id,
- repository_dependency_id,
- repository_name,
- repository_owner,
- changeset_revision,
- key,
- val )
- # Cast empty folders to be repository dependencies.
- repository_dependencies_folder, repository_dependency_id = cast_empty_repository_dependency_folders( repository_dependencies_folder,
- repository_dependency_id )
- # Remove repository_dependencies that are also folders, and coerce empty folders into repository dependencies.
+ repository_dependencies_folder, folder_id, repository_dependency_id = \
+ populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
@@ -391,7 +360,6 @@
if key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
label = 'Repository dependencies'
else:
- #toolshed_base_url, name, owner, revision = get_components_from_key( key )
label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( repository_name, changeset_revision, repository_owner )
return label
def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision ):
@@ -417,61 +385,58 @@
repository_owner = items[ 2 ]
changeset_revision = items[ 3 ]
return toolshed_base_url, repository_name, repository_owner, changeset_revision
-def handle_repository_dependencies_entry( repository_dependencies_root_folder, repository_dependencies_folder, repository_dependencies_folder_key,
- folder_keys, folder_id, repository_dependency_id, repository_name, repository_owner, changeset_revision,
- key, val ):
- # Only create a new folder object if necessary.
- folder = get_folder( repository_dependencies_folder, key )
- if not folder:
+def handle_repository_dependencies_container_entry( repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
+ toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
+ folder = get_folder( repository_dependencies_folder, rd_key )
+ label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, repository_dependencies_folder.key )
+ if folder:
+ if rd_key not in folder_keys:
+ folder_id += 1
+ sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=folder )
+ folder.folders.append( sub_folder )
+ else:
+ sub_folder = folder
+ else:
folder_id += 1
- label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key )
- folder = Folder( id=folder_id, key=key, label=label, parent=repository_dependencies_folder )
- for repository_dependency_tup in val:
- toolshed, name, owner, changeset_revision = repository_dependency_tup
- if is_or_should_be_folder( folder_keys, toolshed, name, owner, changeset_revision ):
- check_folder_key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision )
- check_folder = get_folder( repository_dependencies_folder, check_folder_key )
- if check_folder:
- repository_dependency_id += 1
- repository_dependency = RepositoryDependency( id=repository_dependency_id,
- toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
- changeset_revision=changeset_revision )
- if not check_folder.contains_repository_dependency( repository_dependency ):
- check_folder.repository_dependencies.append( repository_dependency )
- else:
- # Create a new folder, which may be populated later.
- folder_id += 1
- label = generate_repository_dependencies_folder_label_from_key( name, owner, changeset_revision, key )
- sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=folder )
- folder.folders.append( sub_folder )
- else:
+ sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
+ repository_dependencies_folder.folders.append( sub_folder )
+ for repository_dependency in rd_value:
+ can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
+ if can_create_dependency:
+ toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
repository_dependency_id += 1
repository_dependency = RepositoryDependency( id=repository_dependency_id,
toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
changeset_revision=changeset_revision )
# Insert the repository_dependency into the folder.
- folder.repository_dependencies.append( repository_dependency )
- if not get_folder( repository_dependencies_folder, folder.key ):
- if folder.folders:
- # Insert the folder into the list.
- repository_dependencies_folder.folders.append( folder )
+ sub_folder.repository_dependencies.append( repository_dependency )
return repository_dependencies_folder, folder_id, repository_dependency_id
-def is_or_should_be_folder( folder_keys, toolshed, repository_name, repository_owner, changeset_revision ):
- key = '%s%s%s%s%s%s%s' % ( toolshed, STRSEP, repository_name, STRSEP, repository_owner, STRSEP, changeset_revision )
- return key in folder_keys
+def is_subfolder_of( folder, repository_dependency ):
+ toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
+ key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision )
+ for sub_folder in folder.folders:
+ if key == sub_folder.key:
+ return True
+ return False
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
+def populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
+ folder_keys = repository_dependencies.keys()
+ for key, value in repository_dependencies.items():
+ repository_dependencies_folder, folder_id, repository_dependency_id = \
+ handle_repository_dependencies_container_entry( repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
+ return repository_dependencies_folder, folder_id, repository_dependency_id
def print_folders( pad, folder ):
# For debugging...
pad_str = ''
for i in range( 1, pad ):
pad_str += ' '
- print '%s%s' % ( pad_str, folder.key )
+ print '%sid: %s key: %s' % ( pad_str, str( folder.id ), folder.key )
+ for repository_dependency in folder.repository_dependencies:
+ print ' %s%s' % ( pad_str, repository_dependency.listify )
for sub_folder in folder.folders:
print_folders( pad+5, sub_folder )
\ No newline at end of file
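In the reworked container code above, is_subfolder_of prevents duplicate folders by regenerating a dependency's key and comparing it against existing sub-folder keys. A small sketch using the Folder class and key helper from this module (ids, labels and revisions are illustrative; Folder is assumed to initialize its .folders list):

    key = generate_repository_dependencies_key_for_repository( 'http://toolshed.g2.bx.psu.edu',
                                                               'emboss_datatypes', 'devteam', 'a89163f31369' )
    parent = Folder( id=1, key='root', label='root', parent=None )
    sub_folder = Folder( id=2, key=key, label='emboss_datatypes', parent=parent )
    parent.folders.append( sub_folder )
    repository_dependency = [ 'http://toolshed.g2.bx.psu.edu', 'emboss_datatypes', 'devteam', 'a89163f31369' ]
    assert is_subfolder_of( parent, repository_dependency )  # already present, so no new folder is created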
commit/galaxy-central: carlfeberhard: Fixes to upload functional tests (test_get_data.py); TwillTestCase: add is_history_empty which checks the length of HDA JSON instead of relying on HTML, add check_hda_json_for_key_value which checks a specific HDA's JSON for a specific value
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7b91f9d888d3/
changeset: 7b91f9d888d3
user: carlfeberhard
date: 2012-12-11 23:29:35
summary: Fixes to upload functional tests (test_get_data.py); TwillTestCase: add is_history_empty which checks the length of HDA JSON instead of relying on HTML, add check_hda_json_for_key_value which checks a specific HDA's JSON for a specific value
affected #: 2 files
diff -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -2,6 +2,7 @@
pkg_resources.require( "twill==0.9" )
import StringIO, os, filecmp, time, unittest, urllib, logging, difflib, tarfile, zipfile, tempfile, re, shutil, subprocess
+import pprint
import twill
import twill.commands as tc
@@ -299,10 +300,11 @@
# twill stores the regex match in a special stack variable
match = twill.namespaces.get_twill_glocals()[1][ '__match__' ]
json_data = from_json_string( match )
- assert check_fn( json_data ), 'failed check_fn'
+ assert check_fn( json_data ), 'failed check_fn: %s' %( check_fn.func_name )
except Exception, exc:
log.error( exc, exc_info=True )
+ log.debug( 'json_data: %s', ( '\n' + pprint.pformat( json_data ) if json_data else '(no match)' ) )
fname = self.write_temp_file( tc.browser.get_html() )
errmsg = ( "json '%s' could not be found or failed check_fn" % ( pattern ) +
"\npage content written to '%s'" % ( fname ) )
@@ -310,6 +312,49 @@
self.home()
+ def is_history_empty( self ):
+ """
+ Uses history page JSON to determine whether this history is empty
+ (i.e. has no undeleted datasets).
+ """
+ def has_no_undeleted_hdas( hda_list ):
+ if not len( hda_list ):
+ return True
+ for hda in hda_list:
+ if not( hda[ 'deleted' ] or hda[ 'purged' ] ):
+ return False
+ return True
+ try:
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', has_no_undeleted_hdas )
+ except AssertionError, exc:
+ log.error( 'history is not empty' )
+ raise exc
+
+ def check_hda_json_for_key_value( self, hda_id, key, value, use_string_contains=False ):
+ """
+ Uses history page JSON to determine whether the current history:
+ (1) has an hda with hda_id,
+ (2) that hda has a JSON var named 'key',
+ (3) that var 'key' == value
+ If use_string_contains=True, this will search for value in var 'key'
+ instead of testing for an entire, exact match (string only).
+ """
+ #TODO: multi key, value
+ def hda_has_key_value( hda_list ):
+ for hda in hda_list:
+ # if we found the hda and there's a var in the json named key
+ if( ( hda[ 'id' ] == hda_id )
+ and ( key in hda ) ):
+ var = hda[ key ]
+ # test for partial string containment if str and requested
+ if( ( type( var ) == str )
+ and ( use_string_contains ) ):
+ return ( value in var )
+ # otherwise, test for equivalence
+ return ( var == value )
+ return False
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', hda_has_key_value )
+
def clear_history( self ):
"""Empties a history of all datasets"""
self.visit_page( "clear_history" )
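Both new helpers above scrape the same client-side state: check_history_json matches \bhdas\s*=\s*(.*); against the rendered history page and hands the parsed array to the supplied check function. A hypothetical minimal match and its parsed form:

    # As embedded in the page (illustrative):
    #   hdas = [{"id": "abc123", "deleted": false, "purged": false, "name": "1.bed"}];
    # from_json_string turns the captured group into the list the check functions receive:
    hda_list = [ { 'id' : 'abc123', 'deleted' : False, 'purged' : False, 'name' : '1.bed' } ]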
diff -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -5,131 +5,210 @@
from base.test_db_util import *
class UploadData( TwillTestCase ):
+
def test_0000_setup_upload_tests( self ):
- """Configuring upload tests, setting admin_user"""
+ """
+ Configuring upload tests, setting admin_user
+ """
self.logout()
self.login( email='test(a)bx.psu.edu' )
global admin_user
admin_user = get_user( email='test(a)bx.psu.edu' )
+
+ def create_fresh_history( self, user ):
+ """
+ Deletes latest history for the given user, checks for an empty history,
+ and returns that new, empty history
+ """
+ # in order to remove a lot of boilerplate - and to avoid cascading errors
+ history = get_latest_history_for_user( user )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ self.is_history_empty()
+ return get_latest_history_for_user( user )
+
def test_0005_upload_file( self ):
- """Test uploading 1.bed, NOT setting the file format"""
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bed, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0006_upload_file( self ):
- """Test uploading 1.bed.spaces, with space to tab selected, NOT setting the file format"""
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bed.spaces, with space to tab selected, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bed.spaces', space_to_tab = True )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0010_upload_file( self ):
- """Test uploading 4.bed.gz, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.gz, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0012_upload_file( self ):
- """Test uploading 4.bed.bz2, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.bz2, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.bz2', dbkey='hg17', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
+
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0015_upload_file( self ):
- """Test uploading 1.scf, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.scf, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.scf', ftype='scf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.scf', hid=str( hda.hid ) )
- self.check_history_for_string( "Binary scf sequence file</pre>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "Binary scf sequence file", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0020_upload_file( self ):
- """Test uploading 1.scf, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.scf, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.scf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "File Format' to 'Scf' when uploading scf files" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "File Format' to 'Scf' when uploading scf files", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0025_upload_file( self ):
- """Test uploading 4.bed.zip, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.zip, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.zip', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0030_upload_file( self ):
- """Test uploading 4.bed.zip, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.zip, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.zip' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0035_upload_file( self ):
- """Test uploading 1.sam NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.sam NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.sam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.sam', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.QNAME</th><th>2.FLAG</th><th>3.RNAME</th><th>4.POS</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.QNAME</th><th>2.FLAG</th><th>3.RNAME</th><th>4.POS</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0040_upload_file( self ):
- """Test uploading 1.sff, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.sff, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.sff' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.sff', hid=str( hda.hid ) )
- self.check_history_for_string( 'format: <span class="sff">sff' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "sff", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0045_upload_file( self ):
- """Test uploading 454Score.pdf, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 454Score.pdf, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '454Score.pdf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "1: 454Score.pdf</span>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "name", "454Score.pdf" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0050_upload_file( self ):
- """Test uploading 454Score.png, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 454Score.png, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '454Score.png' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "454Score.png" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "name", "454Score.png" )
+
+ self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0055_upload_file( self ):
- """Test uploading lped composite datatype file, manually setting the file format"""
+ """
+ Test uploading lped composite datatype file, manually setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# lped data types include a ped_file and a map_file ( which is binary )
self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped' }, { 'name':'map_file', 'value':'tinywga.map'} ] )
# Get the latest hid for testing
@@ -138,13 +217,18 @@
# We'll test against the resulting ped file and map file for correctness
self.verify_composite_datatype_file_content( 'tinywga.ped', str( hda.id ), base_name = 'rgenetics.ped' )
self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0056_upload_file( self ):
- """Test uploading lped composite datatype file, manually setting the file format, and using space to tab on one file (tinywga.ped)"""
+ """
+ Test uploading lped composite datatype file, manually setting the file format, and using space to tab on one file (tinywga.ped)
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# lped data types include a ped_file and a map_file ( which is binary )
self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped', 'space_to_tab':True }, { 'name':'map_file', 'value':'tinywga.map'} ] )
# Get the latest hid for testing
@@ -153,15 +237,25 @@
# We'll test against the resulting ped file and map file for correctness
self.verify_composite_datatype_file_content( 'tinywga.ped.space_to_tab', str( hda.id ), base_name = 'rgenetics.ped' )
self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0060_upload_file( self ):
- """Test uploading pbed composite datatype file, manually setting the file format"""
+ """
+ Test uploading pbed composite datatype file, manually setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# pbed data types include a bim_file, a bed_file and a fam_file
- self.upload_file( None, ftype='pbed', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'bim_file', 'value':'tinywga.bim' }, { 'name':'bed_file', 'value':'tinywga.bed'}, { 'name':'fam_file', 'value':'tinywga.fam' } ] )
+ self.upload_file( None, ftype='pbed',
+ metadata = [ { 'name':'base_name', 'value':'rgenetics' } ],
+ composite_data = [
+ { 'name':'bim_file', 'value':'tinywga.bim' },
+ { 'name':'bed_file', 'value':'tinywga.bed' },
+ { 'name':'fam_file', 'value':'tinywga.fam' } ])
# Get the latest hid for testing
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
@@ -169,280 +263,373 @@
self.verify_composite_datatype_file_content( 'tinywga.bim', str( hda.id ), base_name = 'rgenetics.bim' )
self.verify_composite_datatype_file_content( 'tinywga.bed', str( hda.id ), base_name = 'rgenetics.bed' )
self.verify_composite_datatype_file_content( 'tinywga.fam', str( hda.id ), base_name = 'rgenetics.fam' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0065_upload_file( self ):
- """Test uploading asian_chars_1.txt, NOT setting the file format"""
+ """
+ Test uploading asian_chars_1.txt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'asian_chars_1.txt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'asian_chars_1.txt', hid=str( hda.hid ) )
- self.check_history_for_string( 'uploaded multi-byte char file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "uploaded multi-byte char file", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0070_upload_file( self ):
- """Test uploading 2gen.fastq, NOT setting the file format"""
+ """
+ Test uploading 2gen.fastq, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '2gen.fastq' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '2gen.fastq', hid=str( hda.hid ) )
- self.check_history_for_string( '2gen.fastq format: <span class="fastq">fastq</span>, database: \? Info: uploaded fastq file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "fastq" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0075_upload_file( self ):
- """Test uploading 1.wig, NOT setting the file format"""
+ """
+ Test uploading 1.wig, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.wig' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.wig', hid=str( hda.hid ) )
- self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "wig" )
self.check_metadata_for_string( 'value="1.wig" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0080_upload_file( self ):
- """Test uploading 1.tabular, NOT setting the file format"""
+ """
+ Test uploading 1.tabular, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.tabular' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.tabular', hid=str( hda.hid ) )
- self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "tabular" )
self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0085_upload_file( self ):
- """Test uploading qualscores.qualsolid, NOT setting the file format"""
+ """
+ Test uploading qualscores.qualsolid, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'qualscores.qualsolid' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'qualscores.qualsolid', hid=str( hda.hid ) )
- self.check_history_for_string( '48 lines format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "qualsolid" )
self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0090_upload_file( self ):
- """Test uploading qualscores.qual454, NOT setting the file format"""
+ """
+ Test uploading qualscores.qual454, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'qualscores.qual454' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'qualscores.qual454', hid=str( hda.hid ) )
- self.check_history_for_string( '49 lines format: <span class="qual454">qual454</span>, database: \?' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "qual454" )
self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0095_upload_file( self ):
- """Test uploading 3.maf, NOT setting the file format"""
+ """
+ Test uploading 3.maf, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '3.maf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '3.maf', hid=str( hda.hid ) )
- self.check_history_for_string( '3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "maf" )
self.check_metadata_for_string( 'value="3.maf" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="interval">Convert MAF to Genomic Intervals <option value="fasta">Convert MAF to Fasta' )
self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0100_upload_file( self ):
- """Test uploading 1.lav, NOT setting the file format"""
+ """
+ Test uploading 1.lav, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.lav' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.lav', hid=str( hda.hid ) )
- self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "lav" )
self.check_metadata_for_string( 'value="1.lav" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0105_upload_file( self ):
- """Test uploading 1.interval, NOT setting the file format"""
+ """
+ Test uploading 1.interval, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.interval' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.interval', hid=str( hda.hid ) )
- self.check_history_for_string( '1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "interval" )
self.check_metadata_for_string( 'value="1.interval" value="\?"' )
self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert Genomic Intervals To BED' )
self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0110_upload_file( self ):
- """Test uploading 5.gff3, NOT setting the file format"""
+ """
+ Test uploading 5.gff3, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '5.gff3' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '5.gff3', hid=str( hda.hid ) )
- self.check_history_for_string( '5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "gff3" )
self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0115_upload_file( self ):
- """Test uploading html_file.txt, NOT setting the file format"""
+ """
+ Test uploading html_file.txt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'html_file.txt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( 'The uploaded file contains inappropriate content' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "The uploaded file contains inappropriate HTML content", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0120_upload_file( self ):
- """Test uploading 5.gff, NOT setting the file format"""
- # Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 5.gff, NOT setting the file format
+
+ Test sniffer for gff.
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '5.gff' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '5.gff', hid=str( hda.hid ) )
- self.check_history_for_string( '5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "gff" )
self.check_metadata_for_string( 'value="5.gff" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0125_upload_file( self ):
- """Test uploading 1.fasta, NOT setting the file format"""
+ """
+ Test uploading 1.fasta, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.fasta' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.fasta', hid=str( hda.hid ) )
- self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "fasta" )
self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0130_upload_file( self ):
- """Test uploading 1.customtrack, NOT setting the file format"""
+ """
+ Test uploading 1.customtrack, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.customtrack' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.customtrack', hid=str( hda.hid ) )
- self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "customtrack" )
self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0135_upload_file( self ):
- """Test uploading shrimp_cs_test1.csfasta, NOT setting the file format"""
+ """
+ Test uploading shrimp_cs_test1.csfasta, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'shrimp_cs_test1.csfasta' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta', hid=str( hda.hid ) )
- self.check_history_for_string( '2,500 sequences format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "csfasta" )
self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0145_upload_file( self ):
- """Test uploading 1.axt, NOT setting the file format"""
+ """
+ Test uploading 1.axt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.axt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.axt', hid=str( hda.hid ) )
- self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "axt" )
self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0150_upload_file( self ):
- """Test uploading 1.bam, which is a sorted Bam file creaed by the Galaxy sam_to_bam tool, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bam, which is a sorted Bam file created by the Galaxy sam_to_bam tool, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bam', hid=str( hda.hid ), attributes={ 'ftype' : 'bam' } )
- self.check_history_for_string( '<span class="bam">bam</span>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bam" )
# Make sure the Bam index was created
assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 1.bam"
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0155_upload_file( self ):
- """Test uploading 3unsorted.bam, which is an unsorted Bam file, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 3unsorted.bam, which is an unsorted Bam file, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '3unsorted.bam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
# Since 3unsorted.bam is not sorted, we cannot verify dataset correctness since the uploaded
# dataset will be sorted. However, the check below to see if the index was created is
# sufficient.
- self.check_history_for_string( '<span class="bam">bam</span>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bam" )
# Make sure the Bam index was created
assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 3unsorted.bam"
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0160_url_paste( self ):
- """Test url paste behavior"""
+ """
+ Test url paste behavior
+ """
# Logged in as admin_user
- # Deleting the current history should have created a new history
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_url_paste( 'hello world' )
- self.check_history_for_string( 'Pasted Entry' )
- self.check_history_for_string( 'hello world' )
+ self.check_history_for_exact_string( 'Pasted Entry' )
+ self.check_history_for_exact_string( 'hello world' )
self.upload_url_paste( u'hello world' )
- self.check_history_for_string( 'Pasted Entry' )
- self.check_history_for_string( 'hello world' )
+ self.check_history_for_exact_string( 'Pasted Entry' )
+ self.check_history_for_exact_string( 'hello world' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0165_upload_file( self ):
- """Test uploading 1.pileup, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.pileup, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.pileup' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.pileup', hid=str( hda.hid ) )
- self.check_history_for_string( '1.pileup format: <span class="pileup">pileup</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "pileup" )
self.check_metadata_for_string( 'value="1.pileup" value="\?" Change data type selected value="pileup" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0170_upload_file( self ):
- """Test uploading 1.bigbed, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bigbed, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bigbed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bigbed', hid=str( hda.hid ) )
- self.check_history_for_string( '1.bigbed</span> database: \? Info: uploaded bigbed file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bigbed" )
self.check_metadata_for_string( 'value="1.bigbed" value="\?" Change data type selected value="bigbed" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0175_upload_file( self ):
- """Test uploading 1.bigwig, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bigwig, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bigwig' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bigwig', hid=str( hda.hid ) )
- self.check_history_for_string( '1.bigwig</span> database: \? Info: uploaded bigwig file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bigwig" )
self.check_metadata_for_string( 'value="1.bigwig" value="\?" Change data type selected value="bigwig" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_9999_clean_up( self ):
self.logout()
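A note on the pattern running through this changeset: the commit replaces brittle HTML scraping (check_history_for_string) with assertions against the dataset's JSON representation. The helper below is a hypothetical minimal sketch of that kind of check, not the actual check_hda_json_for_key_value added by this commit; it assumes the caller has already fetched the HDA's JSON as a string, and it uses simplejson, which Galaxy already depends on:

import simplejson

def check_json_for_key_value( raw_json, key, value, use_string_contains=False ):
    # Decode the JSON representation of the history dataset (HDA).
    data = simplejson.loads( raw_json )
    assert key in data, "Key '%s' missing from HDA JSON" % key
    if use_string_contains:
        # Substring match, e.g. misc_info messages that carry extra detail.
        assert value in data[ key ], "Expected '%s' within '%s'" % ( value, data[ key ] )
    else:
        # Exact match, e.g. sniffed data_type values like "bam" or "gff3".
        assert data[ key ] == value, "Expected '%s', got '%s'" % ( value, data[ key ] )

# Example: assert the uploaded file was sniffed as fastq.
check_json_for_key_value( '{"data_type": "fastq"}', "data_type", "fastq" )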
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: when creating a visualization, default the view to the first chromosome.
11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d9e2418fb00a/
changeset: d9e2418fb00a
user: jgoecks
date: 2012-12-11 22:47:54
summary: Trackster: when creating a visualization, default the view to the first chromosome.
affected #: 2 files
diff -r 376a3714dc1510d3a1405575fc2667b68a28a695 -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1190,7 +1190,7 @@
view.chrom_select.html(chrom_options);
view.chrom_start_index = result.start_index;
- chrom_data.resolve(result);
+ chrom_data.resolve(result.chrom_info);
},
error: function() {
alert("Could not load chroms for this dbkey:", view.dbkey);
diff -r 376a3714dc1510d3a1405575fc2667b68a28a695 -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -188,7 +188,7 @@
var self = this,
view = new tracks.TracksterView(view_config);
view.editor = true;
- $.when( view.load_chroms_deferred ).then(function() {
+ $.when( view.load_chroms_deferred ).then(function(chrom_info) {
// Viewport config.
if (viewport_config) {
var
@@ -201,6 +201,10 @@
view.change_chrom(chrom, start, end);
}
}
+ else {
+ // No viewport, so use first chromosome.
+ view.change_chrom(chrom_info[0].chrom);
+ }
// Add drawables to view.
if (drawables_config) {
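The mechanic worth noting here is that the value handed to a jQuery Deferred's resolve() becomes the argument of the then() callback, which is how chrom_info reaches the viewport code above. A standalone sketch of that flow, with dummy data and hypothetical names (not code from this changeset):

var chrom_data = $.Deferred();

// When the chroms request returns, resolve with the chromosome list itself.
chrom_data.resolve( [ { chrom: "chr1", len: 1000000 } ] );

// The resolution value arrives as the callback's argument.
$.when( chrom_data ).then( function( chrom_info ) {
    // With no saved viewport, default the view to the first chromosome.
    console.log( "Defaulting to " + chrom_info[0].chrom );
});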
Repository URL: https://bitbucket.org/galaxy/galaxy-central/