galaxy-commits
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/34f61e53a563/
changeset: 34f61e53a563
user: greg
date: 2012-12-13 18:00:53
summary: More import tweaks.
affected #: 6 files
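The theme of this changeset is replacing wildcard imports with explicit module imports and qualifying each call site. As a minimal, self-contained illustration of the pattern (the stdlib os.path stands in for Galaxy's shed_util_common; the file path is made up):

import os.path

# Before (the style the diff removes): a wildcard import dumps every public
# name into the local namespace, hiding where strip_path and friends live.
#     from os.path import *
#     name = basename("/tools/emboss_antigenic.xml")

# After (the style the diff introduces): import the module and qualify each
# reference, mirroring strip_path(...) becoming suc.strip_path(...).
name = os.path.basename("/tools/emboss_antigenic.xml")
print(name)  # -> emboss_antigenic.xml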
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -2,13 +2,14 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
-import urllib2, tempfile
+import os, urllib2, tempfile
+from galaxy import util
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
-from galaxy.tool_shed.common_util import *
+from galaxy.tool_shed import common_util
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -37,17 +38,17 @@
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = REPOSITORY_OWNER
+ self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
tool_shed_accessible = False
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
@@ -112,7 +113,7 @@
# Tools outside of sections.
file_path = elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
if elem not in tool_panel_elems:
tool_panel_elems.append( elem )
@@ -122,7 +123,7 @@
if section_elem.tag == 'tool':
file_path = section_elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
# Append the section, not the tool.
if elem not in tool_panel_elems:
@@ -139,7 +140,7 @@
if proprietary_tool_panel_elem.tag == 'tool':
# The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded outside of any sections.
tool_sections.append( None )
@@ -151,7 +152,7 @@
if section_elem.tag == 'tool':
# The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = section_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded inside of the section_elem.
tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
@@ -349,7 +350,7 @@
shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
- return get_non_shed_tool_panel_configs( self.app )
+ return common_util.get_non_shed_tool_panel_configs( self.app )
def __get_url_from_tool_shed( self, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
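The comment above describes what __get_url_from_tool_shed returns. A hypothetical one-liner of that host-to-URL step (the function name and hard-coded scheme here are illustrative, not Galaxy's actual code):

def url_from_tool_shed(tool_shed_host):
    # "toolshed.g2.bx.psu.edu" -> "http://toolshed.g2.bx.psu.edu/"
    return "http://%s/" % tool_shed_host

print(url_from_tool_shed("toolshed.g2.bx.psu.edu"))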
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -6,6 +6,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
from galaxy.util.odict import odict
+from galaxy.tool_shed import common_util
log = logging.getLogger( __name__ )
@@ -48,11 +49,11 @@
# New installations will not be missing tools, so we don't need to worry about them.
missing_tool_configs_dict = odict()
else:
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-import galaxy.util.shed_util as shed_util
+import galaxy.util.shed_util
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -887,11 +887,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return galaxy.util.shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -393,7 +393,7 @@
for tool_dict in metadata[ 'tools' ]:
guid = tool_dict[ 'guid' ]
tool_config = tool_dict[ 'tool_config' ]
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
guids_and_configs[ guid ] = file_name
# Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
tree = util.parse_xml( shed_tool_conf )
@@ -434,7 +434,7 @@
{<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
"""
tool_panel_dict = {}
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections )
tool_panel_dict[ guid ] = tool_section_dicts
return tool_panel_dict
@@ -471,11 +471,11 @@
return tool_section
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
- config_file = strip_path( config_file )
+ config_file = suc.strip_path( config_file )
for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
- ctx_file_name = strip_path( ctx_file )
+ ctx_file_name = suc.strip_path( ctx_file )
if ctx_file_name == config_file:
return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
@@ -491,7 +491,7 @@
for converter in elem.findall( 'converter' ):
converter_config = converter.get( 'file', None )
if converter_config:
- converter_config_file_name = strip_path( converter_config )
+ converter_config_file_name = suc.strip_path( converter_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -508,7 +508,7 @@
for display_app in elem.findall( 'display' ):
display_config = display_app.get( 'file', None )
if display_config:
- display_config_file_name = strip_path( display_config )
+ display_config_file_name = suc.strip_path( display_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -574,7 +574,7 @@
if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
return index, shed_tool_conf_dict
else:
- file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+ file_name = suc.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
def get_tool_index_sample_files( sample_files ):
@@ -722,11 +722,11 @@
params_with_missing_index_file = repository_tool.params_with_missing_index_file
for param in params_with_missing_index_file:
options = param.options
- missing_file_name = strip_path( options.missing_index_file )
+ missing_file_name = suc.strip_path( options.missing_index_file )
if missing_file_name not in sample_files_copied:
# The repository must contain the required xxx.loc.sample file.
for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
+ sample_file_name = suc.strip_path( sample_file )
if sample_file_name == '%s.sample' % missing_file_name:
suc.copy_sample_file( app, sample_file )
if options.tool_data_table and options.tool_data_table.missing_index_file:
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1316,7 +1316,7 @@
return suc.get_repository_file_contents( file_path )
def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
"""Return file_name from the received changeset_revision of the repository manifest."""
- stripped_file_name = strip_path( file_name )
+ stripped_file_name = suc.strip_path( file_name )
repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1109,7 +1109,7 @@
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
else:
- file_name = strip_path( config_filename )
+ file_name = suc.strip_path( config_filename )
if file_name == shed_tool_conf:
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b96fe1d76b38/
changeset: b96fe1d76b38
user: greg
date: 2012-12-13 17:41:31
summary: Utility code cleanup and import tweaking.
affected #: 19 files
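This changeset makes two related moves: narrowing wildcard imports to the single names a file actually uses (from galaxy.model.orm import and_), and turning intra-package star imports into qualified module imports (import common, then common.get_user(...)). A runnable sketch of both moves, with stdlib modules as stand-ins for Galaxy's:

# Move 1: "from galaxy.model.orm import *" becomes an import of one name;
# math stands in for galaxy.model.orm here.
from math import sqrt

# Move 2: "from common import *" becomes "import common" plus qualified
# call sites; json stands in for the controllers' common module.
import json

print(sqrt(2.0))
print(json.dumps({"user": "greg"}))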
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -3,7 +3,7 @@
"""
import os
import galaxy.util.shed_util
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/common_util.py
--- a/lib/galaxy/tool_shed/common_util.py
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -1,7 +1,7 @@
import os, urllib2
from galaxy import util
from galaxy.util.odict import odict
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_decode
REPOSITORY_OWNER = 'devteam'
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/encoding_util.py
--- a/lib/galaxy/tool_shed/encoding_util.py
+++ b/lib/galaxy/tool_shed/encoding_util.py
@@ -1,5 +1,5 @@
-import binascii
-from galaxy.util.hash_util import *
+import binascii, logging
+from galaxy.util.hash_util import hmac_new
from galaxy.util.json import json_fix
from galaxy import eggs
@@ -8,6 +8,8 @@
pkg_resources.require( "simplejson" )
import simplejson
+log = logging.getLogger( __name__ )
+
encoding_sep = '__esep__'
def tool_shed_decode( value ):
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -5,7 +5,6 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
-from common import *
from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,5 +1,5 @@
import os, shutil, tarfile, urllib2, zipfile
-from galaxy.datatypes.checkers import *
+from galaxy.datatypes import checkers
def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
env_var_name = elem.get( 'name', 'PATH' )
@@ -73,13 +73,13 @@
zip_archive.close()
return True
def isbz2( file_path ):
- return is_bz2( file_path )
+ return checkers.is_bz2( file_path )
def isgzip( file_path ):
- return is_gzip( file_path )
+ return checkers.is_gzip( file_path )
def istar( file_path ):
return tarfile.is_tarfile( file_path )
def iszip( file_path ):
- return check_zip( file_path )
+ return checkers.check_zip( file_path )
def make_directory( full_path ):
if not os.path.exists( full_path ):
os.makedirs( full_path )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -1,8 +1,8 @@
import sys, os, subprocess, tempfile
-from common_util import *
-from fabric_util import *
-from galaxy.tool_shed.encoding_util import *
-from galaxy.model.orm import *
+import common_util
+import fabric_util
+from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
@@ -166,7 +166,7 @@
env_var_dicts = []
for env_elem in action_elem:
if env_elem.tag == 'environment_variable':
- env_var_dict = create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
+ env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
env_var_dicts.append( env_var_dict )
if env_var_dicts:
@@ -185,7 +185,7 @@
else:
try:
# There is currently only one fabric method.
- install_and_build_package( app, tool_dependency, actions_dict )
+ fabric_util.install_and_build_package( app, tool_dependency, actions_dict )
except Exception, e:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
tool_dependency.error_message = str( e )
@@ -273,7 +273,7 @@
name=env_var_name,
version=None )
tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository )
- env_var_dict = create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
+ env_var_dict = common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
@@ -284,10 +284,10 @@
type='set_environment',
status=app.model.ToolDependency.installation_status.INSTALLING,
set_status=True )
- cmd = create_or_update_env_shell_file( install_dir, env_var_dict )
+ cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
if env_var_version == '1.0':
# Handle setting environment variables using a fabric method.
- handle_command( app, tool_dependency, install_dir, cmd )
+ fabric_util.handle_command( app, tool_dependency, install_dir, cmd )
sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -1,12 +1,12 @@
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import Admin
from galaxy.webapps.community import model
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
import galaxy.util.shed_util_common as suc
-from common import *
+import common
from repository import RepositoryGrid, CategoryGrid
from galaxy import eggs
@@ -474,7 +474,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = get_user( trans, kwd[ 'user_id' ] )
+ user = common.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -489,7 +489,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if kwd[ 'id' ]:
@@ -533,7 +533,7 @@
# The received id is a RepositoryMetadata object id, so we need to get the
# associated Repository and redirect to view_or_manage_repository with the
# changeset_revision.
- repository_metadata = get_repository_metadata_by_id( trans, kwd[ 'id' ] )
+ repository_metadata = common.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
repository = repository_metadata.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
@@ -554,7 +554,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif get_category_by_name( trans, name ):
+ elif common.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -615,7 +615,7 @@
ids = util.listify( id )
count = 0
for repository_metadata_id in ids:
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
count += 1
@@ -641,7 +641,7 @@
action='manage_categories',
message=message,
status='error' ) )
- category = get_category( trans, id )
+ category = common.get_category( trans, id )
if params.get( 'edit_category_button', False ):
new_name = util.restore_text( params.get( 'name', '' ) ).strip()
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
@@ -649,7 +649,7 @@
if not new_name:
message = 'Enter a valid name'
status = 'error'
- elif category.name != new_name and get_category_by_name( trans, name ):
+ elif category.name != new_name and common.get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -772,7 +772,7 @@
ids = util.listify( id )
message = "Deleted %d categories: " % len( ids )
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
category.deleted = True
trans.sa_session.add( category )
trans.sa_session.flush()
@@ -800,7 +800,7 @@
purged_categories = ""
message = "Purged %d categories: " % len( ids )
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
if category.deleted:
# Delete RepositoryCategoryAssociations
for rca in category.repositories:
@@ -827,7 +827,7 @@
count = 0
undeleted_categories = ""
for category_id in ids:
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
if category.deleted:
category.deleted = False
trans.sa_session.add( category )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,16 +1,13 @@
import os, string, socket, logging, simplejson, binascii, tempfile, filecmp
-from time import strftime
+from time import gmtime, strftime
from datetime import *
-from galaxy.datatypes.checkers import *
from galaxy.tools import *
from galaxy.util.odict import odict
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.hash_util import *
import galaxy.util.shed_util_common as suc
-from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from galaxy.model.item_attrs import UsesItemRatings
from galaxy import eggs
@@ -73,9 +70,6 @@
'${host}'
"""
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-
malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
@@ -146,7 +140,6 @@
return False
def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
"""Determine if the current changeset revision has been reviewed by the current user."""
- changeset_revision_reviewed_by_user = False
for review in repository.reviews:
if review.changeset_revision == changeset_revision and review.user == user:
return True
@@ -162,34 +155,6 @@
if user_email in admin_users:
return True
return False
-def copy_file_from_disk( filename, repo_dir, dir ):
- file_path = None
- found = False
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == filename:
- file_path = os.path.abspath( os.path.join( root, name ) )
- found = True
- break
- if found:
- break
- if file_path:
- tmp_filename = os.path.join( dir, filename )
- shutil.copy( file_path, tmp_filename )
- else:
- tmp_filename = None
- return tmp_filename
-def generate_tool_guid( trans, repository, tool ):
- """
- Generate a guid for the received tool. The form of the guid is
- <tool shed host>/repos/<tool shed username>/<tool shed repo name>/<tool id>/<tool version>
- """
- return '%s/repos/%s/%s/%s/%s' % ( trans.request.host,
- repository.user.username,
- repository.name,
- tool.id,
- tool.version )
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
stripped_file_name = suc.strip_path( file_name )
file_path = None
@@ -287,10 +252,6 @@
def get_repository_metadata_by_id( trans, id ):
"""Get repository metadata from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
-def get_repository_metadata_by_repository_id( trans, id ):
- """Get all metadata records for a specified repository."""
- return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) )
def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
repository_metadata_revisions = []
metadata_changeset_revision_hashes = []
@@ -402,7 +363,7 @@
tip_changeset = repo.changelog.tip()
ctx = repo.changectx( tip_changeset )
t, tz = ctx.date()
- date = datetime( *time.gmtime( float( t ) - tz )[:6] )
+ date = datetime( *gmtime( float( t ) - tz )[:6] )
display_date = date.strftime( "%Y-%m-%d" )
try:
username = ctx.user().split()[0]
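Among the utilities deleted above is generate_tool_guid, whose docstring spells out the guid layout. A tiny sketch of that format with made-up values (the real code builds it from trans.request.host and the repository record):

# <tool shed host>/repos/<owner>/<repository>/<tool id>/<tool version>
guid = "%s/repos/%s/%s/%s/%s" % (
    "toolshed.g2.bx.psu.edu", "devteam", "emboss_5", "antigenic", "5.0.0")
print(guid)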
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1,5 +1,5 @@
import os, logging, tempfile, shutil, ConfigParser
-from time import strftime
+from time import gmtime, strftime
from datetime import date, datetime
from galaxy import util
from galaxy.web.base.controller import *
@@ -8,10 +8,10 @@
from galaxy.webapps.community.model import directory_hash_id
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
-from common import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode
+import common
from galaxy import eggs
eggs.require('mercurial')
@@ -511,7 +511,7 @@
link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
attach_popup=False )
-class RepositoryController( BaseUIController, ItemRatings ):
+class RepositoryController( BaseUIController, common.ItemRatings ):
install_matched_repository_grid = InstallMatchedRepositoryGrid()
matched_repository_grid = MatchedRepositoryGrid()
@@ -535,7 +535,7 @@
# The value of 'id' has been set to the search string, which is a repository name. We'll try to get the desired encoded repository
# id to pass on.
try:
- repository = get_repository_by_name( trans, kwd[ 'id' ] )
+ repository = common.get_repository_by_name( trans, kwd[ 'id' ] )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -610,7 +610,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
if 'user_id' in kwd:
- user = get_user( trans, kwd[ 'user_id' ] )
+ user = common.get_user( trans, kwd[ 'user_id' ] )
kwd[ 'f-email' ] = user.email
del kwd[ 'user_id' ]
else:
@@ -650,7 +650,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
elif operation == "receive email alerts":
if trans.user:
@@ -691,7 +691,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
suc.update_repository( repo )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repository=repository,
@@ -716,7 +716,8 @@
# The value of 'id' has been set to the search string, which is a repository name.
# We'll try to get the desired encoded repository id to pass on.
try:
- repository = get_repository_by_name( trans, kwd[ 'id' ] )
+ name = kwd[ 'id' ]
+ repository = common.get_repository_by_name( trans, name )
kwd[ 'id' ] = trans.security.encode_id( repository.id )
except:
pass
@@ -739,7 +740,7 @@
if 'f-Category.name' in kwd:
# The user browsed to a category and then entered a search string, so get the category associated with it's value.
category_name = kwd[ 'f-Category.name' ]
- category = get_category_by_name( trans, category_name )
+ category = common.get_category_by_name( trans, category_name )
# Set the id value in kwd since it is required by the ValidRepositoryGrid.build_initial_query method.
kwd[ 'id' ] = trans.security.encode_id( category.id )
if galaxy_url:
@@ -749,7 +750,7 @@
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_metadata = get_latest_repository_metadata( trans, repository.id )
+ repository_metadata = common.get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -761,7 +762,7 @@
if k.startswith( 'f-' ):
del kwd[ k ]
category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
+ category = common.get_category( trans, category_id )
kwd[ 'f-Category.name' ] = category.name
# The changeset_revision_select_field in the ValidRepositoryGrid performs a refresh_on_change which sends in request parameters like
# changeset_revison_1, changeset_revision_2, etc. One of the many select fields on the grid performed the refresh_on_change, so we loop
@@ -921,7 +922,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
if not categories:
message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
'An administrator needs to create some via the Administrator control panel before creating repositories.',
@@ -1018,11 +1019,11 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository, tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
if message:
status = 'error'
tool_state = self.__new_state( trans )
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, repository_id, changeset_revision )
try:
return trans.fill_template( "/webapps/community/repository/tool_form.mako",
@@ -1085,7 +1086,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1102,7 +1103,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd[ 'repository_ids' ] = encoded_repository_ids
@@ -1170,7 +1171,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1187,7 +1188,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd = {}
@@ -1581,9 +1582,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
- repository, tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, error_message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
invalid_file_tups = suc.check_tool_input_params( trans.app,
@@ -1781,7 +1782,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = get_revision_label( trans, repository, repository.tip( trans.app ) )
+ revision_label = common.get_revision_label( trans, repository, repository.tip( trans.app ) )
repository_metadata = None
repository_metadata_id = None
metadata = None
@@ -1790,7 +1791,7 @@
if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1800,7 +1801,7 @@
if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
if repository_metadata:
- revision_label = get_revision_label( trans, repository, previous_changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, previous_changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
@@ -1815,20 +1816,17 @@
handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += malicious_error_can_push
+ message += common.malicious_error_can_push
else:
- message += malicious_error
+ message += common.malicious_error
status = 'error'
malicious_check_box = CheckboxField( 'malicious', checked=is_malicious )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
selected_categories = [ rca.category_id for rca in repository.categories ]
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -1929,7 +1927,7 @@
repository_metadata_id = None
metadata = None
repository_dependencies = None
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
@@ -2001,7 +1999,7 @@
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
repository=repository,
@@ -2161,7 +2159,7 @@
if not commit_message:
commit_message = 'Deleted selected files'
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
- handle_email_alerts( trans, repository )
+ common.handle_email_alerts( trans, repository )
# Update the repository files for browsing.
suc.update_repository( repo )
# Get the new repository tip.
@@ -2173,11 +2171,11 @@
else:
message += 'The selected files were deleted from the repository. '
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
+ common.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
repository=repository,
@@ -2200,7 +2198,7 @@
# Get the name of the server hosting the tool shed instance.
host = trans.request.host
# Build the email message
- body = string.Template( contact_owner_template ) \
+ body = string.Template( common.contact_owner_template ) \
.safe_substitute( username=trans.user.username,
repository_name=repository.name,
email=trans.user.email,
@@ -2319,7 +2317,7 @@
else:
has_metadata = False
t, tz = ctx.date()
- date = datetime( *time.gmtime( float( t ) - tz )[:6] )
+ date = datetime( *gmtime( float( t ) - tz )[:6] )
display_date = date.strftime( "%Y-%m-%d" )
change_dict = { 'ctx' : ctx,
'rev' : str( ctx.rev() ),
@@ -2332,7 +2330,7 @@
'has_metadata' : has_metadata }
# Make sure we'll view latest changeset first.
changesets.insert( 0, change_dict )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/view_changelog.mako',
repository=repository,
@@ -2363,7 +2361,7 @@
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
diffs.append( suc.to_safe_string( diff, to_html=True ) )
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
repository=repository,
@@ -2433,7 +2431,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -2449,20 +2447,17 @@
else:
repository_metadata_id = None
metadata = None
- is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, id, repository.tip( trans.app ) )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
- message += malicious_error_can_push
+ message += common.malicious_error_can_push
else:
- message += malicious_error
+ message += common.malicious_error
status = 'error'
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2499,7 +2494,7 @@
tool = None
guid = None
original_tool_data_path = trans.app.config.tool_data_path
- revision_label = get_revision_label( trans, repository, changeset_revision )
+ revision_label = common.get_revision_label( trans, repository, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
@@ -2512,7 +2507,7 @@
guid = tool_metadata_dict[ 'guid' ]
full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
- can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
+ can_use_disk_file = common.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
@@ -2534,19 +2529,16 @@
tool_lineage = self.get_versions_of_tool( trans, repository, repository_metadata, guid )
else:
metadata = None
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
+ is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
trans.app.config.tool_data_path = original_tool_data_path
- reviewed_by_user = changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = common.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = get_review_by_repository_id_changeset_revision_user_id( trans,
- id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) )
+ review = common.get_review_by_repository_id_changeset_revision_user_id( trans, id, changeset_revision, trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
@@ -2598,7 +2590,7 @@
# Restrict the options to all revisions that have associated metadata.
repository_metadata_revisions = repository.metadata_revisions
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
+ rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
changeset_tups.append( ( rev, label, changeset_revision ) )
refresh_on_change_values.append( changeset_revision )
# Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -6,7 +6,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.model.orm import and_
from sqlalchemy.sql.expression import func
-from common import *
+import common
from galaxy.webapps.community.util.container_util import STRSEP
from repository import RepositoryGrid
import galaxy.util.shed_util_common as suc
@@ -59,7 +59,7 @@
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for review in repository.reviews:
changeset_revision = review.changeset_revision
- rev, label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -67,13 +67,13 @@
class WithoutReviewsRevisionColumn( grids.GridColumn ):
def get_value( self, trans, grid, repository ):
# Restrict the options to revisions that have not yet been reviewed.
- repository_metadata_revisions = get_repository_metadata_revisions_for_review( repository, reviewed=False )
+ repository_metadata_revisions = common.get_repository_metadata_revisions_for_review( repository, reviewed=False )
if repository_metadata_revisions:
rval = ''
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = get_rev_label_changeset_revision_from_repository_metadata( trans,
- repository_metadata,
- repository=repository )
+ rev, label, changeset_revision = common.get_rev_label_changeset_revision_from_repository_metadata( trans,
+ repository_metadata,
+ repository=repository )
rval += '<a href="manage_repository_reviews_of_revision'
rval += '?id=%s&changeset_revision=%s">%s</a><br/>' % ( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
@@ -177,7 +177,7 @@
rval += 'edit_review'
else:
rval +='browse_review'
- rval += '?id=%s">%s</a>' % ( encoded_review_id, get_revision_label( trans, review.repository, review.changeset_revision ) )
+ rval += '?id=%s">%s</a>' % ( encoded_review_id, common.get_revision_label( trans, review.repository, review.changeset_revision ) )
return rval
class RatingColumn( grids.TextColumn ):
def get_value( self, trans, grid, review ):
@@ -260,7 +260,7 @@
.outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
.outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
-class RepositoryReviewController( BaseUIController, ItemRatings ):
+class RepositoryReviewController( BaseUIController, common.ItemRatings ):
component_grid = ComponentGrid()
repositories_reviewed_by_me_grid = RepositoriesReviewedByMeGrid()
@@ -277,7 +277,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
encoded_review_id = kwd[ 'id' ]
- review = get_review( trans, encoded_review_id )
+ review = common.get_review( trans, encoded_review_id )
if kwd.get( 'approve_repository_review_button', False ):
approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
approved_select_field_value = str( kwd[ approved_select_field_name ] )
@@ -309,10 +309,10 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- review = get_review( trans, kwd[ 'id' ] )
+ review = common.get_review( trans, kwd[ 'id' ] )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/browse_review.mako',
repository=repository,
changeset_revision_label=changeset_revision_label,
@@ -345,7 +345,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif get_component_by_name( trans, name ):
+ elif common.get_component_by_name( trans, name ):
message = 'A component with that name already exists'
status = 'error'
else:
@@ -377,16 +377,16 @@
if repository_id:
if changeset_revision:
# Make sure there is not already a review of the revision by the user.
- if get_review_by_repository_id_changeset_revision_user_id( trans,
- repository_id,
- changeset_revision,
- trans.security.encode_id( trans.user.id ) ):
+ if common.get_review_by_repository_id_changeset_revision_user_id( trans,
+ repository_id,
+ changeset_revision,
+ trans.security.encode_id( trans.user.id ) ):
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
- if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ):
+ if not create_without_copying and not previous_review_id and common.has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
action='select_previous_review',
**kwd ) )
@@ -404,7 +404,7 @@
trans.sa_session.add( review )
trans.sa_session.flush()
if previous_review_id:
- review_to_copy = get_review( trans, previous_review_id )
+ review_to_copy = common.get_review( trans, previous_review_id )
self.copy_review( trans, review_to_copy, review )
review_id = trans.security.encode_id( review.id )
message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \
@@ -440,7 +440,7 @@
action='manage_categories',
message=message,
status='error' ) )
- component = get_component( trans, id )
+ component = common.get_component( trans, id )
if params.get( 'edit_component_button', False ):
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
if component.description != new_description:
@@ -465,9 +465,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
review_id = kwd.get( 'id', None )
- review = get_review( trans, review_id )
+ review = common.get_review( trans, review_id )
components_dict = odict()
- for component in get_components( trans ):
+ for component in common.get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
@@ -517,8 +517,8 @@
approved = str( v )
elif component_review_attr == 'rating':
rating = int( str( v ) )
- component = get_component( trans, component_id )
- component_review = get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
+ component = common.get_component( trans, component_id )
+ component_review = common.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
if component_review:
# See if the existing component review should be updated.
if component_review.comment != comment or \
@@ -572,7 +572,7 @@
name='revision_approved',
selected_value=selected_value,
for_component=False )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/edit_review.mako',
repository=repository,
review=review,
@@ -659,14 +659,14 @@
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
reviews_dict = odict()
- for changeset in get_reversed_changelog_changesets( repo ):
+ for changeset in common.get_reversed_changelog_changesets( repo ):
ctx = repo.changectx( changeset )
changeset_revision = str( ctx )
if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes:
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
if changeset_revision in reviewed_revision_hashes:
# Find the review for this changeset_revision
- repository_reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
@@ -704,8 +704,8 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
- reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
+ reviews = common.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/reviews_of_changeset_revision.mako',
repository=repository,
changeset_revision=changeset_revision,
@@ -724,7 +724,7 @@
if 'operation' in kwd:
operation = kwd['operation'].lower()
# The value of the received id is the encoded review id.
- review = get_review( trans, kwd[ 'id' ] )
+ review = common.get_review( trans, kwd[ 'id' ] )
repository = review.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
if operation == "inspect repository revisions":
@@ -737,7 +737,7 @@
action='view_or_manage_repository',
**kwd ) )
# The user may not be the current user. The value of the received id is the encoded user id.
- user = get_user( trans, kwd[ 'id' ] )
+ user = common.get_user( trans, kwd[ 'id' ] )
self.repository_reviews_by_user_grid.title = "All repository revision reviews for user '%s'" % user.username
return self.repository_reviews_by_user_grid( trans, **kwd )
@web.expose
@@ -768,8 +768,8 @@
repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision )
- rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
+ previous_reviews_dict = common.get_previous_repository_reviews( trans, repository, changeset_revision )
+ rev, changeset_revision_label = common.get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako',
repository=repository,
changeset_revision=changeset_revision,
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -1,8 +1,7 @@
import sys, os, shutil, logging, tarfile, tempfile, urllib
from galaxy.web.base.controller import *
-from galaxy.model.orm import *
-from galaxy.datatypes.checkers import *
-from common import *
+from galaxy.datatypes import checkers
+import common
import galaxy.util.shed_util_common as suc
from galaxy import eggs
@@ -13,8 +12,6 @@
undesirable_dirs = [ '.hg', '.svn', '.git', '.cvs' ]
undesirable_files = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
CHUNK_SIZE = 2**20 # 1Mb
class UploadController( BaseUIController ):
@@ -26,7 +23,7 @@
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
category_ids = util.listify( params.get( 'category_id', '' ) )
- categories = get_categories( trans )
+ categories = common.get_categories( trans )
repository_id = params.get( 'repository_id', '' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
@@ -40,7 +37,7 @@
url = params.get( 'url', '' )
# Part of the upload process is sending email notification to those that have registered to
# receive them. One scenario occurs when the first change set is produced for the repository.
- # See the handle_email_alerts() method for the definition of the scenarios.
+ # See the common.handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new( trans.app )
uploaded_directory = None
if params.get( 'upload_button', False ):
@@ -85,9 +82,9 @@
isbz2 = False
if uploaded_file:
if uncompress_file:
- isgzip = is_gzip( uploaded_file_name )
+ isgzip = checkers.is_gzip( uploaded_file_name )
if not isgzip:
- isbz2 = is_bz2( uploaded_file_name )
+ isbz2 = checkers.is_bz2( uploaded_file_name )
if isempty:
tar = None
istar = False
@@ -134,7 +131,7 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
+ check_contents = common.check_file_contents( trans )
if check_contents and os.path.isfile( full_path ):
content_alert_str = self.__check_file_content( full_path )
else:
@@ -151,7 +148,7 @@
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
suc.update_repository( repo )
@@ -180,7 +177,7 @@
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
+ common.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
# Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
if repository.metadata_revisions:
@@ -330,7 +327,7 @@
pass
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
+ check_contents = common.check_file_contents( trans )
for filename_in_archive in filenames_in_archive:
# Check file content to ensure it is appropriate.
if check_contents and os.path.isfile( filename_in_archive ):
@@ -344,7 +341,7 @@
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ common.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
@@ -423,8 +420,8 @@
return True, ''
def __check_file_content( self, file_path ):
message = ''
- if check_html( file_path ):
+ if checkers.check_html( file_path ):
message = 'The file "%s" contains HTML content.\n' % str( file_path )
- elif check_image( file_path ):
+ elif checkers.check_image( file_path ):
message = 'The file "%s" contains image content.\n' % str( file_path )
return message
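For context on the checkers calls above: gzip and bzip2 files can be recognized by their leading magic bytes. A minimal sketch of that idea, assuming magic-byte sniffing (the real galaxy.datatypes.checkers implementations may differ):

def is_gzip(file_path):
    # Gzip streams start with the two magic bytes 0x1f 0x8b.
    with open(file_path, 'rb') as fh:
        return fh.read(2) == b'\x1f\x8b'

def is_bz2(file_path):
    # Bzip2 streams start with the ASCII magic 'BZh'.
    with open(file_path, 'rb') as fh:
        return fh.read(3) == b'BZh'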
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -6,12 +6,12 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
+from galaxy.web.base.controller import *
from galaxy.tools import DefaultToolState
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
-from galaxy.model.orm import *
-from common import *
+import common
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
class RepoInputDataModule( InputDataModule ):
@@ -49,7 +49,7 @@
self.errors = None
for tool_dict in tools_metadata:
if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
- repository, self.tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+ repository, self.tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
if message and self.tool is None:
self.errors = 'unavailable'
break
@@ -144,7 +144,7 @@
workflow_name = tool_shed_decode( workflow_name )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
repository=repository,
@@ -156,7 +156,7 @@
status=status )
@web.expose
def generate_workflow_image( self, trans, repository_metadata_id, workflow_name ):
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
metadata = repository_metadata.metadata
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -7,7 +7,7 @@
import os.path, os, errno, sys, codecs, operator, logging, tarfile, mimetypes, ConfigParser
from galaxy import util
from galaxy.util.bunch import Bunch
-from galaxy.util.hash_util import *
+from galaxy.util.hash_util import new_secure_hash
from galaxy.web.form_builder import *
from galaxy import eggs
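The model module now imports only new_secure_hash from hash_util. As a rough sketch of what such a helper can look like, assuming a SHA-1 hexdigest (the actual hash_util code may differ):

import hashlib

def new_secure_hash(text_type=None):
    # Return the hexdigest of text_type if given, else a fresh hash object.
    if text_type:
        return hashlib.sha1(text_type.encode('utf-8')).hexdigest()
    return hashlib.sha1()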
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -13,8 +13,8 @@
from galaxy.model.orm.ext.assignmapper import *
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
-from galaxy.webapps.community.util.shed_statistics import *
-from galaxy.webapps.community.util.hgweb_config import *
+import galaxy.webapps.community.util.shed_statistics as shed_statistics
+import galaxy.webapps.community.util.hgweb_config
from galaxy.webapps.community.security import CommunityRBACAgent
metadata = MetaData()
@@ -318,6 +318,6 @@
result.create_tables = create_tables
# Load local tool shed security policy
result.security_agent = CommunityRBACAgent( result )
- result.shed_counter = ShedCounter( result )
- result.hgweb_config_manager = HgWebConfigManager()
+ result.shed_counter = shed_statistics.ShedCounter( result )
+ result.hgweb_config_manager = galaxy.webapps.community.util.hgweb_config.HgWebConfigManager()
return result
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/community/security/__init__.py
--- a/lib/galaxy/webapps/community/security/__init__.py
+++ b/lib/galaxy/webapps/community/security/__init__.py
@@ -5,7 +5,7 @@
from datetime import datetime, timedelta
from galaxy.util.bunch import Bunch
from galaxy.util import listify
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
log = logging.getLogger(__name__)
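and_ is SQLAlchemy's conjunction construct, and it is the only ORM-layer name this module actually uses. A self-contained illustration of and_ in a filter (toy model, not the Galaxy schema):

from sqlalchemy import Boolean, Column, Integer, and_, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Role(Base):
    __tablename__ = 'role'
    id = Column(Integer, primary_key=True)
    deleted = Column(Boolean, default=False)
    hidden = Column(Boolean, default=False)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# and_() joins several clauses into one WHERE condition, the way the
# RBAC agent's queries combine filters.
visible_roles = session.query(Role).filter(
    and_(Role.deleted == False, Role.hidden == False)).all()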
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -10,7 +10,7 @@
from galaxy.web.params import QuotaParamParser
from galaxy.exceptions import *
from galaxy.util.odict import *
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_decode
import galaxy.datatypes.registry
import logging, imp, subprocess, urllib2
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -3,7 +3,7 @@
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy import eggs, tools
eggs.require( 'mercurial' )
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -14,7 +14,7 @@
from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import topsort, topsort_levels, CycleError
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy.workflow.modules import *
from galaxy import model
from galaxy import util
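tool_shed_encode and tool_shed_decode move values between Galaxy and a tool shed as strings. The sketch below assumes a signed, hex-encoded JSON payload purely for illustration; the actual scheme lives in galaxy.tool_shed.encoding_util and may differ:

import binascii
import hashlib
import hmac
import json

SECRET = b'shared-secret'  # hypothetical key; both applications must agree

def tool_shed_encode(value):
    # Serialize dicts to JSON, then pair a signature with the hex payload
    # so the receiving side can verify the string was not altered.
    if isinstance(value, dict):
        value = json.dumps(value)
    signature = hmac.new(SECRET, value.encode(), hashlib.sha256).hexdigest()
    return '%s:%s' % (signature, binascii.hexlify(value.encode()).decode())

def tool_shed_decode(encoded):
    signature, payload = encoded.split(':', 1)
    text = binascii.unhexlify(payload).decode()
    if hmac.new(SECRET, text.encode(), hashlib.sha256).hexdigest() != signature:
        raise ValueError('signature mismatch')
    try:
        return json.loads(text)
    except ValueError:
        return text

print(tool_shed_decode(tool_shed_encode({'name': 'emboss_5'})))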
diff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d scripts/migrate_tools/migrate_tools.py
--- a/scripts/migrate_tools/migrate_tools.py
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -16,7 +16,7 @@
sys.path = new_path
from galaxy import eggs
-from galaxy.tool_shed.migrate.common import *
+from galaxy.tool_shed.migrate.common import MigrateToolsApplication
app = MigrateToolsApplication( sys.argv[ 1 ] )
non_shed_tool_confs = app.install_manager.proprietary_tool_confs
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c0c6cf9f5d6e/
changeset: c0c6cf9f5d6e
user: greg
date: 2012-12-13 16:14:01
summary: Utility refactoring cleanup and import tweaks.
affected #: 6 files
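Both changesets apply the same import-hygiene pattern: wildcard imports such as from galaxy.util.shed_util import * are replaced with qualified module imports, so every call site names the module it depends on. A tiny stdlib illustration of the idea (not Galaxy code):

# Before: a wildcard import obscures where a name comes from and can
# silently shadow other names in the module.
from os.path import *
print(join('tools', 'filters'))  # which module's join()?

# After: a qualified import makes the call site self-documenting, the way
# shed_util.clean_tool_shed_url() reads in the hunks below.
import os.path as osp
print(osp.join('tools', 'filters'))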
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -5,7 +5,7 @@
import urllib2, tempfile
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
@@ -36,9 +36,9 @@
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
- self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
+ self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
- index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
@@ -172,7 +172,7 @@
# See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
if is_displayed:
- tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
+ tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
else:
@@ -192,50 +192,50 @@
self.app.sa_session.flush()
if 'tool_dependencies' in metadata_dict:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
+ tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
+ shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
+ repository_tools_tups = shed_util.get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
+ repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app,
- self.tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
+ repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app,
+ self.tool_path,
+ sample_files,
+ repository_tools_tups,
+ sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
+ shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Install tool dependencies.
- update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ shed_util.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- installed_tool_dependencies = handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
- add_to_tool_panel( self.app,
- tool_shed_repository.name,
- repository_clone_url,
- tool_shed_repository.installed_changeset_revision,
- repository_tools_tups,
- self.repository_owner,
- self.migrated_tools_config,
- tool_panel_dict=tool_panel_dict_for_display,
- new_install=True )
+ shed_util.add_to_tool_panel( self.app,
+ tool_shed_repository.name,
+ repository_clone_url,
+ tool_shed_repository.installed_changeset_revision,
+ repository_tools_tups,
+ self.repository_owner,
+ self.migrated_tools_config,
+ tool_panel_dict=tool_panel_dict_for_display,
+ new_install=True )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -246,16 +246,16 @@
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
+ converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
+ name=tool_shed_repository.name,
+ owner=self.repository_owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
@@ -281,20 +281,20 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( relative_clone_dir, name )
install_dir = os.path.join( clone_dir, name )
- ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
- tool_shed_repository = create_or_update_tool_shed_repository( app=self.app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=self.repository_owner,
- dist_to_shed=True )
- update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=self.app.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=self.repository_owner,
+ dist_to_shed=True )
+ shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
@@ -305,9 +305,9 @@
self.app.sa_session.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
- update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ shed_util.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
@@ -316,7 +316,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
+ shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
# CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
@@ -325,8 +325,8 @@
tool_id = tool_dict[ 'guid' ]
old_tool_id = tool_dict[ 'id' ]
tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = get_tool_version( self.app, tool_id )
+ tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id )
if not tool_version_using_old_id:
tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
@@ -338,15 +338,15 @@
self.app.sa_session.add( tool_version_using_guid )
self.app.sa_session.flush()
# Associate the two versions as parent / child.
- tool_version_association = get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
+ tool_version_association = shed_util.get_tool_version_association( self.app,
+ tool_version_using_old_id,
+ tool_version_using_guid )
if not tool_version_association:
tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
parent_id=tool_version_using_old_id.id )
self.app.sa_session.add( tool_version_association )
self.app.sa_session.flush()
- update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
+ shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
return get_non_shed_tool_panel_configs( self.app )
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -3,7 +3,7 @@
"""
import threading, urllib2, logging
from galaxy.util import string_as_bool
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
log = logging.getLogger( __name__ )
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -887,11 +887,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
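The refactored property above delegates to shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision, which, as the name suggests, matches an installed-repository record on all four identifying fields. A schematic stand-in with plain data (hypothetical values, not the real query):

def find_installed_repository(records, tool_shed, name, owner,
                              installed_changeset_revision):
    # Match on every field; a record only identifies the same installation
    # if shed, name, owner, and installed changeset revision all agree.
    for record in records:
        if (record['tool_shed'] == tool_shed
                and record['name'] == name
                and record['owner'] == owner
                and record['installed_changeset_revision'] == installed_changeset_revision):
            return record
    return None

records = [dict(tool_shed='toolshed.example.org', name='emboss_5',
                owner='devteam', installed_changeset_revision='abc123def456')]
print(find_installed_repository(records, 'toolshed.example.org', 'emboss_5',
                                'devteam', 'abc123def456'))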
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,5 @@
import os, tempfile, shutil, logging, urllib2
+from galaxy.datatypes import checkers
from galaxy import util
import shed_util_common as suc
from galaxy.tools.search import ToolBoxSearch
@@ -293,10 +294,6 @@
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
return tool_dependency_objects
-def generate_clone_url_for_installed_repository( trans, repository ):
- """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -542,9 +539,6 @@
if idx == count:
break
return headers
-def get_installed_tool_shed_repository( trans, id ):
- """Get a repository on the Galaxy side from the database via id"""
- return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -1042,44 +1036,6 @@
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
-def tool_shed_from_repository_clone_url( repository_clone_url ):
- return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
-def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
- shed_conf_dict = repository.get_shed_config_dict( app )
- shed_tool_conf = shed_conf_dict[ 'config_filename' ]
- tool_path = shed_conf_dict[ 'tool_path' ]
-
- #hack for 'trans.app' used in lots of places. These places should just directly use app
- trans = util.bunch.Bunch()
- trans.app = app
-
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
- repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = suc.clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
- tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
- owner = repository.owner
- if not owner:
- owner = get_repository_owner( cleaned_repository_clone_url )
- guid_to_tool_elem_dict = {}
- for tool_config_filename, guid, tool in repository_tools_tups:
- guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
- config_elems = []
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -565,6 +565,10 @@
repository_dependencies,
metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
+def generate_clone_url_for_installed_repository( trans, repository ):
+ """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -1056,6 +1060,9 @@
if deleted:
return 'DELETED'
return None
+def get_installed_tool_shed_repository( trans, id ):
+ """Get a repository on the Galaxy side from the database via id"""
+ return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_list_of_copied_sample_files( repo, ctx, dir ):
"""
Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
@@ -2011,6 +2018,8 @@
if to_html:
str( markupsafe.escape( ''.join( translated ) ) )
return ''.join( translated )
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def tool_shed_is_this_tool_shed( toolshed_base_url ):
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
@@ -2090,6 +2099,42 @@
sa_session.delete( tool_dependency )
sa_session.flush()
return new_tool_dependency
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( suc.generate_clone_url_for_installed_repository( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
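update_in_shed_tool_config, moved into shed_util_common above, regenerates <tool> entries in a shed_tool_conf file by matching on each tool's guid. A runnable ElementTree sketch of that replacement loop, over a made-up config whose shape is inferred from the parsing code (not an official schema):

import xml.etree.ElementTree as ET

SAMPLE_CONF = """<toolbox tool_path="../shed_tools">
  <section id="filters" name="Filters">
    <tool file="old/filter.xml" guid="shed/repos/owner/repo/filter/1.0.0"/>
  </section>
</toolbox>"""

def replace_tools_by_guid(xml_text, guid_to_elem):
    # Mirrors the loop above: walk each <section> and swap in the
    # regenerated <tool> element whose guid matches an existing entry.
    root = ET.fromstring(xml_text)
    for elem in root:
        if elem.tag == 'section':
            for i, tool_elem in enumerate(elem):
                guid = tool_elem.attrib.get('guid')
                if guid in guid_to_elem:
                    elem[i] = guid_to_elem[guid]
    return root

guid = 'shed/repos/owner/repo/filter/1.0.0'
new_elem = ET.Element('tool', {'file': 'new/filter.xml', 'guid': guid})
print(ET.tostring(replace_tools_by_guid(SAMPLE_CONF, {guid: new_elem})).decode())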
diff -r 70f88a048ed2b565d60250ca66302fd03a7852ff -r c0c6cf9f5d6ee0a63d0c058439467c1340698930 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1,7 +1,7 @@
import urllib2, tempfile
from admin import *
from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.shed_util import *
+import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -284,7 +284,7 @@
model.ToolDependency.installation_status.ERROR ] ) )
]
def build_initial_query( self, trans, **kwd ):
- tool_dependency_ids = get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
if tool_dependency_ids:
clause_list = []
for tool_dependency_id in tool_dependency_ids:
@@ -308,15 +308,15 @@
@web.require_admin
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools:
metadata = repository.metadata
try:
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
+ repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata )
except Exception, e:
error = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error )
@@ -324,25 +324,25 @@
% ( error, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) ) )
# Reload tools into the appropriate tool panel section.
tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
- add_to_tool_panel( trans.app,
- repository.name,
- repository_clone_url,
- repository.changeset_revision,
- repository_tools_tups,
- repository.owner,
- shed_tool_conf,
- tool_panel_dict,
- new_install=False )
+ shed_util.add_to_tool_panel( trans.app,
+ repository.name,
+ repository_clone_url,
+ repository.changeset_revision,
+ repository_tools_tups,
+ repository.owner,
+ shed_tool_conf,
+ tool_panel_dict,
+ new_install=False )
trans.sa_session.add( repository )
trans.sa_session.flush()
if repository.includes_datatypes:
repository_install_dir = os.path.abspath ( relative_install_dir )
# Load proprietary datatypes.
- installed_repository_dict = load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
+ installed_repository_dict = shed_util.load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
if installed_repository_dict and 'converter_path' in installed_repository_dict:
- load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
+ shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
if installed_repository_dict and 'display_path' in installed_repository_dict:
- load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
+ shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
message = 'The <b>%s</b> repository has been activated.' % repository.name
status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -355,7 +355,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
repository=repository,
message=message,
@@ -379,7 +379,7 @@
action='reset_to_install',
**kwd ) )
if operation == "activate or reinstall":
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if repository.uninstalled:
if repository.includes_tools:
# Only allow selecting a different section in the tool panel if the repository was uninstalled.
@@ -407,7 +407,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
+ tool_dependency = shed_util.get_tool_dependency( trans, kwd[ 'id' ] )
if tool_dependency.in_error_state:
message = "This tool dependency is not installed correctly (see the <b>Tool dependency installation error</b> below). "
message += "Choose <b>Uninstall this tool dependency</b> from the <b>Repository Actions</b> menu, correct problems "
@@ -439,7 +439,7 @@
@web.require_admin
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
@@ -459,8 +459,8 @@
status = params.get( 'status', 'done' )
remove_from_disk = params.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
- tool_shed_repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
if tool_path:
relative_install_dir = os.path.join( tool_path, relative_install_dir )
@@ -471,14 +471,14 @@
if params.get( 'deactivate_or_uninstall_repository_button', False ):
if tool_shed_repository.includes_tools:
# Handle tool panel alterations.
- remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
+ shed_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
- installed_repository_dict = load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
+ installed_repository_dict = shed_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
if installed_repository_dict and 'converter_path' in installed_repository_dict:
- load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
if installed_repository_dict and 'display_path' in installed_repository_dict:
- load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
+ shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
@@ -496,7 +496,7 @@
tool_shed_repository.uninstalled = True
# Remove all installed tool dependencies.
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = '%s %s' % ( errors, error_message )
tool_shed_repository.deleted = True
@@ -553,7 +553,7 @@
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
- tool_shed_url = clean_tool_shed_url( tool_shed_url )
+ tool_shed_url = shed_util.clean_tool_shed_url( tool_shed_url )
return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
@@ -570,7 +570,7 @@
name, owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We
need it so that we can derive the tool shed from which it was installed.
"""
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
@@ -585,7 +585,7 @@
text = ''
return text
def get_versions_of_tool( self, app, guid ):
- tool_version = get_tool_version( app, guid )
+ tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@web.expose
@web.require_admin
@@ -614,10 +614,10 @@
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
- installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
message += ' %s' % installed_tool_dependency.error_message
@@ -644,7 +644,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
if kwd.get( 'install_tool_dependencies_button', False ):
# Filter tool dependencies to only those that are installed.
@@ -691,7 +691,7 @@
tool_shed_repository, repo_info_dict = tup
repo_info_dict = tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
@@ -702,10 +702,10 @@
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository, we need to find the latest changeset revision to which it can be updated.
- current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
+ current_changeset_revision, current_ctx_rev = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
- pull_repository( repo, repository_clone_url, current_changeset_revision )
+ shed_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
suc.update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
@@ -720,9 +720,9 @@
metadata = tool_shed_repository.metadata
if 'tools' in metadata:
# Get the tool_versions from the tool shed for each tool in the installed change set.
- update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ shed_util.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
url = suc.url_join( tool_shed_url,
'/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -732,7 +732,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
+ shed_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
@@ -741,20 +741,20 @@
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp()
# Install tool dependencies.
- update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ shed_util.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
- installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies )
+ installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_shed_repository.tool_dependencies )
try:
shutil.rmtree( work_dir )
except:
pass
- update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
else:
# An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
self.set_repository_attributes( trans,
@@ -788,34 +788,34 @@
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'tools' in metadata_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
+ tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
+ shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
+ repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
+ repository_tools_tups = shed_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app,
- tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
+ repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( trans.app,
+ tool_path,
+ sample_files,
+ repository_tools_tups,
+ sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
- add_to_tool_panel( app=trans.app,
- repository_name=tool_shed_repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_tools_tups=repository_tools_tups,
- owner=tool_shed_repository.owner,
- shed_tool_conf=shed_tool_conf,
- tool_panel_dict=tool_panel_dict,
- new_install=True )
+ shed_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
+ shed_util.add_to_tool_panel( app=trans.app,
+ repository_name=tool_shed_repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=tool_shed_repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=True )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -827,16 +827,16 @@
files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
+ converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
@@ -851,7 +851,7 @@
status = params.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
operation = kwd.get( 'operation', None )
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
@@ -866,7 +866,7 @@
( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
@@ -954,9 +954,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency_ids = get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
# We need a tool_shed_repository, so get it from one of the tool_dependencies.
- tool_dependency = get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
tool_shed_repository = tool_dependency.tool_shed_repository
self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name
self.tool_dependency_grid.global_actions = \
@@ -997,7 +997,7 @@
elif operation == 'uninstall':
tool_dependencies_for_uninstallation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
trans.model.ToolDependency.installation_status.ERROR ]:
tool_dependencies_for_uninstallation.append( tool_dependency )
@@ -1012,7 +1012,7 @@
if trans.app.config.tool_dependency_dir:
tool_dependencies_for_installation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
trans.model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
@@ -1156,17 +1156,17 @@
**new_kwd ) )
else:
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
- tool_shed_repository = create_or_update_tool_shed_repository( app=trans.app,
- name=name,
- description=description,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=changeset_revision,
- owner=repository_owner,
- dist_to_shed=False )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=trans.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=changeset_revision,
+ owner=repository_owner,
+ dist_to_shed=False )
created_or_updated_tool_shed_repositories.append( tool_shed_repository )
filtered_repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
if created_or_updated_tool_shed_repositories:
@@ -1288,14 +1288,14 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
- tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
@@ -1307,9 +1307,9 @@
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if not tool_panel_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
else:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
# Fix this to handle the case where the tools are distributed across more than 1 ToolSection - this assumes everything was loaded into 1
# section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
@@ -1353,18 +1353,18 @@
else:
tool_section = None
# The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function.
- tool_shed_repository = create_or_update_tool_shed_repository( trans.app,
- tool_shed_repository.name,
- tool_shed_repository.description,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.ctx_rev,
- repository_clone_url,
- tool_shed_repository.metadata,
- trans.model.ToolShedRepository.installation_status.NEW,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.owner,
- tool_shed_repository.dist_to_shed )
- ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ tool_shed_repository = shed_util.create_or_update_tool_shed_repository( trans.app,
+ tool_shed_repository.name,
+ tool_shed_repository.description,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.ctx_rev,
+ repository_clone_url,
+ tool_shed_repository.metadata,
+ trans.model.ToolShedRepository.installation_status.NEW,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.owner,
+ tool_shed_repository.dist_to_shed )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repo_info_dict = kwd.get( 'repo_info_dict', None )
# The repo_info_dict should be encoded.
if not repo_info_dict:
@@ -1450,11 +1450,11 @@
#clone_path, clone_directory = os.path.split( clone_dir )
changeset_revisions = util.listify( text )
for previous_changeset_revision in changeset_revisions:
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
- tool_shed,
- repository_name,
- repository_owner,
- previous_changeset_revision )
+ tool_shed_repository = shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
+ tool_shed,
+ repository_name,
+ repository_owner,
+ previous_changeset_revision )
if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
return tool_shed_repository, previous_changeset_revision
return None, None
@@ -1462,11 +1462,11 @@
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
repository_id = kwd[ 'id' ]
- tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
@@ -1486,7 +1486,7 @@
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
- if panel_entry_per_tool( tool_panel_dict ):
+ if shed_util.panel_entry_per_tool( tool_panel_dict ):
# TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
# following assumes everything was loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
@@ -1570,9 +1570,9 @@
@web.require_admin
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
- repository = get_installed_tool_shed_repository( trans, id )
+ repository = suc.get_installed_tool_shed_repository( trans, id )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
@@ -1587,7 +1587,7 @@
persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
- update_in_shed_tool_config( trans.app, repository )
+ suc.update_in_shed_tool_config( trans.app, repository )
trans.sa_session.add( repository )
trans.sa_session.flush()
message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
@@ -1607,7 +1607,7 @@
@web.require_admin
def reset_to_install( self, trans, **kwd ):
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
self.set_repository_attributes( trans,
repository,
@@ -1642,7 +1642,7 @@
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
# Get the tool_versions from the tool shed for each tool in the installed change set.
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
+ repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -1652,7 +1652,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, repository )
+ shed_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
else:
@@ -1660,7 +1660,7 @@
message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
message ++ "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
@@ -1699,7 +1699,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
if kwd.get( 'uninstall_tool_dependencies_button', False ):
@@ -1710,7 +1710,7 @@
if tool_dependency.can_uninstall:
tool_dependencies_for_uninstallation.append( tool_dependency )
for tool_dependency in tool_dependencies_for_uninstallation:
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = True
message = '%s %s' % ( message, error_message )
@@ -1744,12 +1744,12 @@
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
latest_ctx_rev = params.get( 'latest_ctx_rev', None )
- repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ repository = shed_util.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
else:
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
if tool_path:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
@@ -1757,9 +1757,9 @@
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- pull_repository( repo, repository_clone_url, latest_ctx_rev )
+ shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
- tool_shed = clean_tool_shed_url( tool_shed_url )
+ tool_shed = shed_util.clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1779,10 +1779,10 @@
trans.sa_session.flush()
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
+ tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
@@ -1802,7 +1802,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_installed_tool_shed_repository( trans, repository_id )
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
repository_metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
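
The refactor running through this changeset (and the one below) is mechanical: wildcard imports such as 'from galaxy.util.shed_util_common import *' become qualified module imports, so every call site names the module that provides the function it calls. A minimal runnable sketch of the pattern, using a stand-in module built with types.ModuleType; the function body here is illustrative only, not Galaxy's actual implementation:

    import os
    import types

    # Hypothetical stand-in for a helper module like galaxy.util.shed_util_common;
    # only the calling convention mirrors the diffs above, not the real API.
    suc = types.ModuleType('shed_util_common')

    def _get_config_from_disk(config_filename, relative_install_dir):
        # Walk the install dir, returning the first path whose basename matches.
        for root, _dirs, files in os.walk(relative_install_dir):
            if config_filename in files:
                return os.path.join(root, config_filename)
        return None

    suc.get_config_from_disk = _get_config_from_disk

    # Before: 'from shed_util_common import *' put the name in scope invisibly,
    # and same-named functions from different modules could silently shadow
    # each other. After: the call site is explicit about its origin, as in
    # 'suc.get_config_from_disk( "datatypes_conf.xml", files_dir )' above.
    datatypes_config = suc.get_config_from_disk('datatypes_conf.xml', '.')
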
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/70f88a048ed2/
changeset: 70f88a048ed2
user: greg
date: 2012-12-12 23:36:40
summary: Tweak some imports.
affected #: 10 files
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -6,6 +6,7 @@
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
@@ -87,7 +88,7 @@
break
full_path = str( os.path.abspath( os.path.join( root, name ) ) )
tool = self.toolbox.load_tool( full_path )
- return generate_tool_guid( repository_clone_url, tool )
+ return suc.generate_tool_guid( repository_clone_url, tool )
def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
# Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
# either ToolSection elements or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml
@@ -177,15 +178,15 @@
else:
print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
% ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
- repository=tool_shed_repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = self.shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app,
+ repository=tool_shed_repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
@@ -217,7 +218,7 @@
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -242,7 +243,7 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
@@ -294,7 +295,7 @@
owner=self.repository_owner,
dist_to_shed=True )
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -33,7 +33,7 @@
self.sleeper.sleep( self.seconds_to_sleep )
log.info( 'Transfer job restarter shutting down...' )
def check_for_update( self, repository ):
- tool_shed_url = get_url_from_repository_tool_shed( self.app, repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( self.app, repository )
url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&from_update_manager=True' % \
( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
try:
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,10 +1,10 @@
import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from shed_util_common import *
+import shed_util_common as suc
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
-from galaxy.tool_shed.encoding_util import *
-from galaxy.model.orm import *
+from galaxy.tool_shed import encoding_util
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
@@ -185,7 +185,7 @@
filename=os.path.join( tool_path, filename )
# Attempt to ensure we're copying an appropriate file.
if is_data_index_sample_file( filename ):
- copy_sample_file( app, filename, dest_path=dest_path )
+ suc.copy_sample_file( app, filename, dest_path=dest_path )
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
repository_name=name,
@@ -204,7 +204,7 @@
# to it being uninstalled.
current_changeset_revision = installed_changeset_revision
sa_session = app.model.context.current
- tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
@@ -255,7 +255,7 @@
if shed_config_dict.get( 'tool_path' ):
relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
try:
tree = ElementTree.parse( tool_dependencies_config )
except Exception, e:
@@ -295,8 +295,8 @@
return tool_dependency_objects
def generate_clone_url_for_installed_repository( trans, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -321,7 +321,7 @@
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
tool_elem = None
- cleaned_repository_clone_url = clean_repository_clone_url( repository_clone_url )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner( cleaned_repository_clone_url )
tool_shed = cleaned_repository_clone_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -475,12 +475,12 @@
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
- for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
ctx_file_name = strip_path( ctx_file )
if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
+ return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
@@ -525,7 +525,7 @@
break
return converter_path, display_path
def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
- url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
response = urllib2.urlopen( url )
ctx_rev = response.read()
response.close()
@@ -552,7 +552,7 @@
repo_path = repo_path.replace( '/', '', 1 )
return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
def get_repository_owner_from_clone_url( repository_clone_url ):
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
def get_repository_tools_tups( app, metadata_dict ):
@@ -674,14 +674,14 @@
.first()
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
( repository.name, repository.owner, repository.installed_changeset_revision ) )
try:
response = urllib2.urlopen( url )
encoded_update_dict = response.read()
if encoded_update_dict:
- update_dict = tool_shed_decode( encoded_update_dict )
+ update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
changeset_revision = update_dict[ 'changeset_revision' ]
ctx_rev = update_dict[ 'ctx_rev' ]
response.close()
@@ -704,11 +704,11 @@
break
if missing_data_table_entry:
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
+ sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
if sample_tool_data_table_conf:
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
- error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
+ error, message = suc.handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
if error:
# TODO: Do more here than logging an exception.
log.debug( message )
@@ -716,7 +716,7 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( app )
+ suc.reset_tool_data_tables( app )
return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
"""
@@ -734,7 +734,7 @@
for sample_file in sample_files:
sample_file_name = strip_path( sample_file )
if sample_file_name == '%s.sample' % missing_file_name:
- copy_sample_file( app, sample_file )
+ suc.copy_sample_file( app, sample_file )
if options.tool_data_table and options.tool_data_table.missing_index_file:
options.tool_data_table.handle_found_index_file( options.missing_index_file )
sample_files_copied.append( options.missing_index_file )
@@ -852,7 +852,7 @@
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
repository_dict = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
if datatypes_config:
converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
if converter_path or display_path:
@@ -883,10 +883,7 @@
return False
def pull_repository( repo, repository_clone_url, ctx_rev ):
"""Pull changes from a remote repository to a local one."""
- commands.pull( get_configured_ui(),
- repo,
- source=repository_clone_url,
- rev=[ ctx_rev ] )
+ commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1038,7 +1035,7 @@
trans.app.toolbox.write_integrated_tool_panel_config_file()
def remove_tool_dependency( trans, tool_dependency ):
dependency_install_dir = tool_dependency.installation_directory( trans.app )
- removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
+ removed, error_message = suc.remove_tool_dependency_installation_directory( dependency_install_dir )
if removed:
tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED
tool_dependency.error_message = None
@@ -1046,7 +1043,7 @@
trans.sa_session.flush()
return removed, error_message
def tool_shed_from_repository_clone_url( repository_clone_url ):
- return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
+ return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def update_in_shed_tool_config( app, repository ):
# A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list.
@@ -1060,7 +1057,7 @@
tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
+ cleaned_repository_clone_url = suc.clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -5,7 +5,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from common import *
from repository import RepositoryGrid, CategoryGrid
@@ -342,8 +342,8 @@
class RevisionColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository_metadata ):
repository = repository_metadata.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
return "%s:%s" % ( str( ctx.rev() ), repository_metadata.changeset_revision )
class ToolsColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository_metadata ):
@@ -481,7 +481,7 @@
# The received id is the repository id, so we need to get the id of the user
# that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_by_category":
# Eliminate the current filters if any exist.
@@ -513,7 +513,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -586,7 +586,7 @@
count = 0
deleted_repositories = ""
for repository_id in ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if not repository.deleted:
repository.deleted = True
trans.sa_session.add( repository )
@@ -717,12 +717,12 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = TOOL_SHED_ADMIN_CONTROLLER
- message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ kwd[ 'CONTROLLER' ] = suc.TOOL_SHED_ADMIN_CONTROLLER
+ message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = build_repository_ids_select_field( trans, TOOL_SHED_ADMIN_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans, suc.TOOL_SHED_ADMIN_CONTROLLER )
return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
@@ -740,7 +740,7 @@
count = 0
undeleted_repositories = ""
for repository_id in ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.deleted:
repository.deleted = False
trans.sa_session.add( repository )
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -6,7 +6,7 @@
from galaxy.util.odict import odict
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
@@ -108,12 +108,12 @@
tool_versions_dict = {}
for tool_dict in metadata.get( 'tools', [] ):
# We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions )
+ parent_id = suc.get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
@@ -130,7 +130,7 @@
return False
if changeset_revision == repository.tip( trans.app ):
return True
- file_name = strip_path( file_path )
+ file_name = suc.strip_path( file_path )
latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
try:
@@ -140,7 +140,7 @@
return can_use_disk_file
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
return repository_metadata.malicious
return False
@@ -191,7 +191,7 @@
tool.id,
tool.version )
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
- stripped_file_name = strip_path( file_name )
+ stripped_file_name = suc.strip_path( file_name )
file_path = None
for root, dirs, files in os.walk( repo_files_dir ):
if root.find( '.hg' ) < 0:
@@ -246,11 +246,11 @@
This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files,
multiple files will have the same name in various directories within the repository.
"""
- stripped_filename = strip_path( filename )
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ stripped_filename = suc.strip_path( filename )
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
manifest_ctx = repo.changectx( changeset )
for ctx_file in manifest_ctx.files():
- ctx_file_name = strip_path( ctx_file )
+ ctx_file_name = suc.strip_path( ctx_file )
if ctx_file_name == stripped_filename:
try:
fctx = manifest_ctx[ ctx_file ]
@@ -268,10 +268,10 @@
return None
def get_previous_repository_reviews( trans, repository, changeset_revision ):
"""Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
previous_reviews_dict = odict()
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
previous_changeset_revision = str( repo.changectx( changeset ) )
if previous_changeset_revision in reviewed_revision_hashes:
previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
@@ -313,9 +313,9 @@
def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
if repository is None:
repository = repository_metadata.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
changeset_revision = repository_metadata.changeset_revision
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
rev = '%04d' % ctx.rev()
label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
@@ -324,7 +324,7 @@
label = "-1:%s" % changeset_revision
return rev, label, changeset_revision
def get_rev_label_from_changeset_revision( repo, changeset_revision ):
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
rev = '%04d' % ctx.rev()
label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
@@ -358,8 +358,8 @@
Return a string consisting of the human-readable
changeset rev and the changeset revision string.
"""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
else:
@@ -389,7 +389,7 @@
# user is not an admin user, the email will not include any information about both HTML and image content
# that was included in the change set.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
smtp_server = trans.app.config.smtp_server
if smtp_server and ( new_repo_alert or repository.email_alerts ):
# Send email alert to users that want them.
@@ -457,9 +457,9 @@
log.exception( "An error occurred sending a tool shed repository update alert by email." )
def has_previous_repository_reviews( trans, repository, changeset_revision ):
"""Determine if a repository has a changeset revision review prior to the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
previous_changeset_revision = str( repo.changectx( changeset ) )
if previous_changeset_revision in reviewed_revision_hashes:
return True
@@ -471,9 +471,9 @@
revision and the first changeset revision in the repository, searching backwards.
"""
original_tool_data_path = trans.app.config.tool_data_path
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
message = ''
tool = None
can_use_disk_file = False
@@ -482,27 +482,27 @@
can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
+ tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( trans.app,
- repo_files_dir,
- tool_config_filename,
- tool,
- sample_files )
+ invalid_files_and_errors_tups = suc.check_tool_input_params( trans.app,
+ repo_files_dir,
+ tool_config_filename,
+ tool,
+ sample_files )
if invalid_files_and_errors_tups:
- message2 = generate_message_for_invalid_tools( trans,
- invalid_files_and_errors_tups,
- repository,
- metadata_dict=None,
- as_html=True,
- displaying_invalid_tool=True )
- message = concat_messages( message, message2 )
+ message2 = suc.generate_message_for_invalid_tools( trans,
+ invalid_files_and_errors_tups,
+ repository,
+ metadata_dict=None,
+ as_html=True,
+ displaying_invalid_tool=True )
+ message = suc.concat_messages( message, message2 )
else:
- tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
- remove_dir( work_dir )
+ tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
+ suc.remove_dir( work_dir )
trans.app.config.tool_data_path = original_tool_data_path
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
return repository, tool, message
def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
"""
@@ -594,36 +594,36 @@
message = ''
status = 'done'
encoded_id = trans.security.encode_id( repository.id )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
if metadata_dict:
- downloadable = is_downloadable( metadata_dict )
+ downloadable = suc.is_downloadable( metadata_dict )
repository_metadata = None
if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \
new_tool_metadata_required( trans, repository, metadata_dict ) or \
new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
+ repository_metadata = suc.create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
# If this is the first record stored for this repository, see if we need to send any email alerts.
if len( repository.downloadable_revisions ) == 1:
handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
- downloadable = is_downloadable( metadata_dict )
+ downloadable = suc.is_downloadable( metadata_dict )
# Update the last saved repository_metadata table row.
repository_metadata.changeset_revision = repository.tip( trans.app )
repository_metadata.metadata = metadata_dict
@@ -632,17 +632,17 @@
trans.sa_session.flush()
else:
# There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = create_or_update_repository_metadata( trans,
- encoded_id,
- repository,
- repository.tip( trans.app ),
- metadata_dict )
+ repository_metadata = suc.create_or_update_repository_metadata( trans,
+ encoded_id,
+ repository,
+ repository.tip( trans.app ),
+ metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
# Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
changeset_revisions = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
- if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
changeset_revisions.append( changeset_revision )
add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
elif len( repo ) == 1 and not invalid_file_tups:
@@ -650,10 +650,10 @@
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
# Set metadata on the repository tip.
@@ -671,7 +671,7 @@
# Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
# added, modified or removed files that have not yet been committed.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
files_to_remove_from_disk = []
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,7 +9,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -616,7 +616,7 @@
else:
# The received id is the repository id, so we need to get the id of the user that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_i_own":
# Eliminate the current filters if any exist.
@@ -673,7 +673,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -687,10 +687,10 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
@@ -748,7 +748,7 @@
operation = kwd[ 'operation' ].lower()
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repository_metadata = get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -772,7 +772,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.tip( trans.app ) != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -817,11 +817,11 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Default to the current changeset revision.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
latest_changeset_revision = changeset_revision
from_update_manager = kwd.get( 'from_update_manager', False )
if from_update_manager:
@@ -829,9 +829,9 @@
no_update = 'false'
else:
# Start building up the url to redirect back to the calling Galaxy instance.
- url = url_join( galaxy_url,
- 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
+ url = suc.url_join( galaxy_url,
+ 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
if from_update_manager:
@@ -839,9 +839,9 @@
# Return the same value for changeset_revision and latest_changeset_revision.
url += latest_changeset_revision
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
if from_update_manager:
@@ -855,19 +855,19 @@
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
if update_to_changeset_hash:
if changeset_hash == repository.tip( trans.app ):
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
break
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_hash )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_hash )
if repository_metadata:
# We found a RepositoryMetadata record.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
break
else:
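
For readers tracing the logic rather than the renames: the hunk above scans the changelog forward from the revision a Galaxy instance currently has and stops at the first later changeset that either is the repository tip or has a RepositoryMetadata record. A minimal plain-Python sketch of that scan, with the Mercurial objects replaced by ordinary values (an ordered list of changeset hashes plus a metadata-lookup callable); the names here are illustrative, not Galaxy API:

def find_update_revision( changelog, current_revision, tip, has_metadata ):
    # Ignore everything up to and including the revision the caller has.
    passed_current = False
    for changeset_hash in changelog:
        if passed_current:
            # The first later revision that is the tip or has a metadata
            # record is the revision to update to.
            if changeset_hash == tip or has_metadata( changeset_hash ):
                return changeset_hash
        elif changeset_hash == current_revision:
            passed_current = True
    # No later installable revision found; stay where we are.
    return current_revision

# find_update_revision( [ 'a1', 'b2', 'c3' ], 'a1', 'c3', lambda h: h == 'b2' ) -> 'b2'
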
@@ -888,7 +888,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
metadata = self.get_metadata( trans, id, repository.tip( trans.app ) )
if trans.user and trans.user.email:
return trans.fill_template( "/webapps/community/repository/contact_owner.mako",
@@ -904,7 +904,7 @@
# Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api. The hg
# purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll remove unknown files
# and empty directories. This is not currently used because it is not supported in the mercurial API.
- repo = hg.repository( get_configured_ui(), path=repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), path=repository.repo_path( trans.app ) )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
fp.write( 'default = .\n' )
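
For reference, a plain-file equivalent of what the hunk above writes through repo.opener( 'hgrc', 'wb' ). The [web] section here is an assumption inferred from the push_ssl comment, since the hunk ends before the remaining writes; in a Mercurial checkout the hgrc lives under .hg/:

import os

def write_minimal_hgrc( repo_path ):
    with open( os.path.join( repo_path, '.hg', 'hgrc' ), 'w' ) as fp:
        fp.write( '[paths]\n' )
        fp.write( 'default = .\n' )
        # Assumed continuation, per the comment about supporting both
        # http and https:
        fp.write( '[web]\n' )
        fp.write( 'push_ssl = false\n' )
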
@@ -963,7 +963,7 @@
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local repository
- repo = hg.repository( get_configured_ui(), repository_path, create=True )
+ repo = hg.repository( suc.get_configured_ui(), repository_path, create=True )
# Add an entry in the hgweb.config file for the local repository.
lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
trans.app.hgweb_config_manager.add_entry( lhs, repository_path )
@@ -999,7 +999,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository_id = params.get( 'id', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) )
repository.deprecated = mark_deprecated
trans.sa_session.add( repository )
@@ -1054,7 +1054,7 @@
def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
# Allow hgweb to handle the download. This requires the tool shed
# server account's .hgrc file to include the following setting:
# [web]
@@ -1087,7 +1087,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1172,7 +1172,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1252,11 +1252,11 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Default to the received changeset revision and ctx_rev.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
ctx_rev = str( update_to_ctx.rev() )
latest_changeset_revision = changeset_revision
update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
@@ -1264,9 +1264,9 @@
# If changeset_revision is the repository tip, there are no additional updates.
return tool_shed_encode( update_dict )
else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
return tool_shed_encode( update_dict )
@@ -1276,16 +1276,16 @@
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
if update_to_changeset_hash:
- if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
# We found a RepositoryMetadata record.
if changeset_hash == repository.tip( trans.app ):
# The current ctx is the repository tip, so use it.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
latest_changeset_revision = changeset_hash
else:
- update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ update_to_ctx = suc.get_changectx_for_changeset( repo, update_to_changeset_hash )
latest_changeset_revision = update_to_changeset_hash
break
elif not update_to_changeset_hash and changeset_hash == changeset_revision:
@@ -1300,10 +1300,10 @@
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
if ctx:
return str( ctx.rev() )
return ''
@@ -1312,16 +1312,16 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return get_repository_file_contents( file_path )
+ return suc.get_repository_file_contents( file_path )
def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
"""Return file_name from the received changeset_revision of the repository manifest."""
stripped_file_name = strip_path( file_name )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
return named_tmp_file
def get_metadata( self, trans, repository_id, changeset_revision ):
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata and repository_metadata.metadata:
return repository_metadata.metadata
return None
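
A dependency-free sketch of the get_named_tmpfile_from_ctx step used in get_file_from_changeset_revision above. The revision snapshot is modeled as a plain dict of file name to bytes, so this shows the shape of the operation rather than the Mercurial-backed implementation; the function name is hypothetical:

import os, tempfile

def named_tmpfile_from_snapshot( snapshot, file_name, dir=None ):
    # Materialize one file's contents from the snapshot into a temporary
    # file and hand back its path for the caller to read (and later delete).
    fd, tmp_path = tempfile.mkstemp( dir=dir )
    with os.fdopen( fd, 'wb' ) as handle:
        handle.write( snapshot[ file_name ] )
    return tmp_path
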
@@ -1331,21 +1331,21 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repository_id = trans.security.encode_id( repository.id )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
# Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
if repository_dependencies:
return tool_shed_encode( repository_dependencies )
return ''
@@ -1361,9 +1361,9 @@
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
repository_id, changeset_revision = tup
- repository = get_repository_in_tool_shed( trans, repository_id )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
metadata = repository_metadata.metadata
if not includes_tools and 'tools' in metadata:
includes_tools = True
@@ -1372,17 +1372,17 @@
if not includes_tool_dependencies and 'tool_dependencies' in metadata:
includes_tool_dependencies = True
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- metadata=None,
- repository_metadata=repository_metadata )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ metadata=None,
+ repository_metadata=repository_metadata )
repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
includes_repository_dependencies=includes_repository_dependencies,
@@ -1397,9 +1397,9 @@
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- return build_readme_files_dict( repository_metadata )
+ repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ return suc.build_readme_files_dict( repository_metadata )
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance."""
@@ -1411,7 +1411,7 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
for downloadable_revision in repository.downloadable_revisions:
if downloadable_revision.changeset_revision == changeset_revision:
break
@@ -1432,13 +1432,13 @@
name = kwd[ 'name' ]
owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
tool_version_dicts = []
for changeset in repo.changelog:
current_changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
if repository_metadata and repository_metadata.tool_versions:
tool_version_dicts.append( repository_metadata.tool_versions )
if current_changeset_revision == changeset_revision:
@@ -1450,14 +1450,14 @@
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Initialize the tool lineage
tool_guid_lineage = [ guid ]
# Get all ancestor guids of the received guid.
current_child_guid = guid
- for changeset in reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
+ for changeset in suc.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
ctx = repo.changectx( changeset )
- rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
if rm:
parent_guid = rm.tool_versions.get( current_child_guid, None )
if parent_guid:
@@ -1465,9 +1465,9 @@
current_child_guid = parent_guid
# Get all descendant guids of the received guid.
current_parent_guid = guid
- for changeset in reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ):
+ for changeset in suc.reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ):
ctx = repo.changectx( changeset )
- rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
if rm:
tool_versions = rm.tool_versions
for child_guid, parent_guid in tool_versions.items():
@@ -1566,15 +1566,15 @@
owner = kwd.get( 'owner', None )
galaxy_url = kwd.get( 'galaxy_url', None )
if not repository_ids:
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repository_ids = trans.security.encode_id( repository.id )
if not galaxy_url:
# If galaxy_url is not in the request, it had to have been stored in a cookie by the tool shed.
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
# Redirect back to local Galaxy to perform install.
- url = url_join( galaxy_url,
- 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
+ url = suc.url_join( galaxy_url,
+ 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
@@ -1586,13 +1586,13 @@
is_malicious = changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
- invalid_file_tups = check_tool_input_params( trans.app,
- repository.repo_path( trans.app ),
- tool_config,
- tool,
- [] )
+ invalid_file_tups = suc.check_tool_input_params( trans.app,
+ repository.repo_path( trans.app ),
+ tool_config,
+ tool,
+ [] )
if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
elif error_message:
message = error_message
try:
@@ -1667,9 +1667,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
description = util.restore_text( params.get( 'description', repository.description ) )
@@ -1787,8 +1787,8 @@
metadata = None
is_malicious = False
repository_dependencies = None
- if changeset_revision != INITIAL_CHANGELOG_HASH:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -1796,9 +1796,9 @@
is_malicious = repository_metadata.malicious
else:
# There is no repository_metadata defined for the changeset_revision, so see if it was defined in a previous changeset in the changelog.
- previous_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
- if previous_changeset_revision != INITIAL_CHANGELOG_HASH:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
+ previous_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
if repository_metadata:
revision_label = get_revision_label( trans, repository, previous_changeset_revision )
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
@@ -1806,13 +1806,13 @@
is_malicious = repository_metadata.malicious
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -1832,7 +1832,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
cntrller=cntrller,
repo_name=repo_name,
@@ -1903,28 +1903,28 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return open_repository_files_folder( trans, folder_path )
+ return suc.open_repository_files_folder( trans, folder_path )
@web.expose
def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -1935,7 +1935,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
containers_dict=containers_dict,
@@ -1961,14 +1961,14 @@
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Get the lower bound changeset revision
- lower_bound_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ lower_bound_changeset_revision = suc.get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
# Build the list of changeset revision hashes.
changeset_hashes = []
- for changeset in reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ):
+ for changeset in suc.reversed_lower_upper_bounded_changelog( repo, lower_bound_changeset_revision, changeset_revision ):
changeset_hashes.append( str( repo.changectx( changeset ) ) )
if changeset_hashes:
changeset_hashes_str = ','.join( changeset_hashes )
@@ -1987,8 +1987,8 @@
action='browse_repositories',
message='Select a repository to rate',
status='error' ) )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -2017,10 +2017,10 @@
def reset_all_metadata( self, trans, id, **kwd ):
# This method is called only from the ~/templates/webapps/community/repository/manage_repository.mako template.
# It resets all metadata on the complete changelog for a single repository in the tool shed.
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
+ invalid_file_tups, metadata_dict = suc.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
- repository = get_repository_in_tool_shed( trans, id )
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
message = "All repository metadata has been reset."
@@ -2125,9 +2125,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
@@ -2163,9 +2163,9 @@
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
# Get the new repository tip.
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
if tip == repository.tip( trans.app ):
message += 'No changes to repository. '
kwd[ 'message' ] = message
@@ -2187,7 +2187,7 @@
status=status )
@web.expose
def send_to_owner( self, trans, id, message='' ):
- repository = get_repository_in_tool_shed( trans, id )
+ repository = suc.get_repository_in_tool_shed( trans, id )
if not message:
message = 'Enter a message'
status = 'error'
@@ -2237,7 +2237,7 @@
total_alerts_removed = 0
flush_needed = False
for repository_id in repository_ids:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.email_alerts:
email_alerts = from_json_string( repository.email_alerts )
else:
@@ -2268,7 +2268,7 @@
def set_malicious( self, trans, id, ctx_str, **kwd ):
malicious = kwd.get( 'malicious', '' )
if kwd.get( 'malicious_button', False ):
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
malicious_checked = CheckboxField.is_checked( malicious )
repository_metadata.malicious = malicious_checked
trans.sa_session.add( repository_metadata )
@@ -2309,12 +2309,12 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
- if get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ):
+ if suc.get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ):
has_metadata = True
else:
has_metadata = False
@@ -2346,9 +2346,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- ctx = get_changectx_for_changeset( repo, ctx_str )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ ctx = suc.get_changectx_for_changeset( repo, ctx_str )
if ctx is None:
message = "Repository does not include changeset revision '%s'." % str( ctx_str )
status = 'error'
@@ -2362,7 +2362,7 @@
anchors = modified + added + removed + deleted + unknown + ignored + clean
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
- diffs.append( to_safe_string( diff, to_html=True ) )
+ diffs.append( suc.to_safe_string( diff, to_html=True ) )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
@@ -2383,7 +2383,7 @@
status=status )
@web.expose
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
@@ -2398,8 +2398,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository_in_tool_shed( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repository = suc.get_repository_in_tool_shed( trans, id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
@@ -2434,18 +2434,18 @@
add_id_to_name=False,
downloadable=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -2466,7 +2466,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
cntrller=cntrller,
repo=repo,
@@ -2491,16 +2491,16 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
tool_metadata_dict = {}
tool_lineage = []
tool = None
guid = None
original_tool_data_path = trans.app.config.tool_data_path
revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
@@ -2515,18 +2515,18 @@
can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans,
- repo_files_dir,
- full_path_to_tool_config,
- work_dir )
+ tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
+ repo_files_dir,
+ full_path_to_tool_config,
+ work_dir )
if message:
status = 'error'
else:
- tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans,
- repo,
- changeset_revision,
- tool_config_filename,
- work_dir )
+ tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans,
+ repo,
+ changeset_revision,
+ tool_config_filename,
+ work_dir )
if message:
status = 'error'
break
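
The pattern repeated throughout the repository.py diff above is mechanical but worth spelling out once: every helper that used to arrive via the wildcard import from galaxy.util.shed_util_common is now reached through the suc alias, so each call site records where its helper is defined. A self-contained illustration of the before/after, using os.path as a stand-in for shed_util_common:

# Before: a star import hides where join() comes from and can silently
# shadow names imported elsewhere in the controller.
#   from os.path import *
#   print( join( 'repos', 'test', 'filter' ) )

# After: import the module once under a short alias and qualify each call,
# mirroring the shed_util_common -> suc change in this changeset.
import os.path as op

print( op.join( 'repos', 'test', 'filter' ) )  # repos/test/filter
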
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -4,12 +4,12 @@
from galaxy.web.form_builder import SelectField, CheckboxField
from galaxy.webapps.community import model
from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.model.orm import *
+from galaxy.model.orm import and_
from sqlalchemy.sql.expression import func
from common import *
from galaxy.webapps.community.util.container_util import STRSEP
from repository import RepositoryGrid
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
from galaxy import eggs
@@ -56,7 +56,7 @@
# Restrict to revisions that have been reviewed.
if repository.reviews:
rval = ''
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for review in repository.reviews:
changeset_revision = review.changeset_revision
rev, label = get_rev_label_from_changeset_revision( repo, changeset_revision )
@@ -311,7 +311,7 @@
status = params.get( 'status', 'done' )
review = get_review( trans, kwd[ 'id' ] )
repository = review.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, review.changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/browse_review.mako',
repository=repository,
@@ -384,7 +384,7 @@
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
@@ -392,7 +392,7 @@
**kwd ) )
# A review can be initially performed only on an installable revision of a repository, so make sure we have metadata associated
# with the received changeset_revision.
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
@@ -470,7 +470,7 @@
for component in get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
for component_review in review.component_reviews:
if component_review and component_review.component:
component_name = component_review.component.name
@@ -653,9 +653,9 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id:
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
reviews_dict = odict()
@@ -669,7 +669,7 @@
repository_reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
repository_metadata_reviews = util.listify( repository_metadata.reviews )
else:
@@ -700,9 +700,9 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
changeset_revision = kwd.get( 'changeset_revision', None )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
reviews = get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
@@ -765,9 +765,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision )
rev, changeset_revision_label = get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/community/repository_review/select_previous_review.mako',
@@ -780,7 +780,7 @@
@web.expose
@web.require_login( "view or manage repository" )
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
+ repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
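
repository_review.py goes one step further and narrows from galaxy.model.orm import * down to the single name it still uses, and_. A small sketch of that import style against plain SQLAlchemy (an assumption for illustration; galaxy.model.orm largely re-exports SQLAlchemy names):

from sqlalchemy import and_, column

# Importing only what is used keeps the module namespace predictable;
# the filter expression itself is unchanged.
expr = and_( column( 'deleted' ) == False, column( 'deprecated' ) == False )
print( expr )  # deleted = false AND deprecated = false
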
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy import eggs
eggs.require('mercurial')
@@ -28,9 +28,9 @@
category_ids = util.listify( params.get( 'category_id', '' ) )
categories = get_categories( trans )
repository_id = params.get( 'repository_id', '' )
- repository = get_repository_in_tool_shed( trans, repository_id )
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
@@ -53,7 +53,7 @@
uploaded_directory = tempfile.mkdtemp()
repo_url = 'http%s' % url[ len( 'hg' ): ]
repo_url = repo_url.encode( 'ascii', 'replace' )
- commands.clone( get_configured_ui(), repo_url, uploaded_directory )
+ commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory )
elif url:
valid_url = True
try:
@@ -146,7 +146,7 @@
if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, error_message = handle_sample_tool_data_table_conf_file( trans.app, full_path )
+ error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path )
if error:
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
@@ -154,7 +154,7 @@
handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
- update_repository( repo )
+ suc.update_repository( repo )
# Get the new repository tip.
if tip == repository.tip( trans.app ):
message = 'No changes to repository. '
@@ -181,8 +181,8 @@
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
- #provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
- if get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
+ # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
+ if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
if repository.metadata_revisions:
metadata_dict = repository.metadata_revisions[0].metadata
else:
@@ -192,7 +192,7 @@
status = 'warning'
log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
id=repository_id,
@@ -202,7 +202,7 @@
else:
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ suc.reset_tool_data_tables( trans.app )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
@@ -214,7 +214,7 @@
status=status )
def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
if upload_point is not None:
@@ -250,7 +250,7 @@
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
ok, message = self.__check_archive( tar )
@@ -283,7 +283,7 @@
return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
content_alert_str = ''
files_to_remove = []
filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
@@ -339,7 +339,7 @@
if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ error, message = suc.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
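
One non-import detail in the upload.py diff worth isolating: an upload URL given with an hg or hgs scheme is rewritten to http or https before commands.clone() runs. The slicing trick from the hunk, on its own (the host below is hypothetical):

# Everything after the leading 'hg' is kept, so the scheme maps
# hg:// -> http:// and hgs:// -> https://.
for url in ( 'hg://toolshed.example.org/repos/test/filter',
             'hgs://toolshed.example.org/repos/test/filter' ):
    repo_url = 'http%s' % url[ len( 'hg' ): ]
    print( repo_url )
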
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -10,7 +10,7 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
from galaxy.model.orm import *
from common import *
-from galaxy.util.shed_util_common import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
class RepoInputDataModule( InputDataModule ):
@@ -145,7 +145,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- repository = get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
+ repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
repository=repository,
changeset_revision=repository_metadata.changeset_revision,
diff -r ae60aaaf6a139e88b2849abf97fd47af711341fd -r 70f88a048ed2b565d60250ca66302fd03a7852ff lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -2,6 +2,7 @@
from admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+import galaxy.util.shed_util_common as suc
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -423,7 +424,7 @@
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) )
+ url = suc.url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s' % ( galaxy_url ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -439,10 +440,10 @@
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -531,14 +532,14 @@
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url )
+ url = suc.url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
def find_workflows_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
+ url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
def generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
@@ -547,20 +548,20 @@
<tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>
http://test@bx.psu.edu:9009/repos/test/filter
"""
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
# Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
tool_shed_url = clean_tool_shed_url( tool_shed_url )
- return url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
+ return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return get_repository_file_contents( file_path )
+ return suc.get_repository_file_contents( file_path )
@web.expose
@web.require_admin
def get_repository_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
@@ -570,10 +571,10 @@
need it so that we can derive the tool shed from which it was installed.
"""
repository = get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository_name, repository_owner, changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
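
Stepping back from the renames, generate_tool_path above is a good example of what suc.url_join is for: composing the install path documented in its docstring, <tool shed url>/repos/<owner>/<name>/<changeset revision>. A stand-in url_join (illustrative only, not the actual suc.url_join) makes the split/join round trip concrete; the changeset hash below is made up:

def url_join( *parts ):
    # Join fragments with single slashes, tolerating stray leading or
    # trailing ones.
    return '/'.join( str( part ).strip( '/' ) for part in parts )

tmp_url = 'bx.psu.edu:9009/repos/test/filter'   # cleaned clone URL
tool_shed_url, repo_path = tmp_url.split( 'repos' )
print( url_join( tool_shed_url, 'repos', repo_path, '9b03f63cd8e8' ) )
# bx.psu.edu:9009/repos/test/filter/9b03f63cd8e8
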
@@ -612,7 +613,7 @@
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -697,15 +698,15 @@
clone_dir = os.path.join( tool_path, relative_clone_dir )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( tool_path, relative_install_dir )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository, we need to find the latest changeset revision to which it can be updated.
current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( install_dir ) )
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
pull_repository( repo, repository_clone_url, current_changeset_revision )
- update_repository( repo, ctx_rev=current_ctx_rev )
+ suc.update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
tool_path=tool_path,
@@ -722,10 +723,10 @@
update_tool_shed_repository_status( trans.app,
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- url = url_join( tool_shed_url,
- '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -744,7 +745,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -774,15 +775,15 @@
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=tool_shed_repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=tool_shed_repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -824,7 +825,7 @@
files_dir = relative_install_dir
if shed_config_dict.get( 'tool_path' ):
files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
@@ -859,13 +860,10 @@
**kwd ) )
if repository.can_install and operation == 'install':
# Send a request to the tool shed to install the repository.
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
- ( repository.name,
- repository.owner,
- repository.installed_changeset_revision,
- ( url_for( '/', qualified=True ) ) ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
@@ -1049,7 +1047,7 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- return open_repository_files_folder( trans, folder_path )
+ return suc.open_repository_files_folder( trans, folder_path )
@web.expose
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
@@ -1081,9 +1079,9 @@
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url = url_join( tool_shed_url,
- 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
- ( repository_ids, changeset_revisions ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
+ ( repository_ids, changeset_revisions ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1240,21 +1238,21 @@
repository_dependencies = None
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- url = url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
- containers_dict = build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies,
+ tool_dependencies=tool_dependencies )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle tool dependencies check box.
@@ -1300,7 +1298,7 @@
repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
tool_section = None
tool_panel_section_key = None
metadata = tool_shed_repository.metadata
@@ -1377,17 +1375,17 @@
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision )
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
repo_info_dict = tool_shed_encode( repo_info_dict )
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
@@ -1439,12 +1437,12 @@
repository_dependencies = None
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url = url_join( tool_shed_url,
- 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1466,7 +1464,7 @@
repository_id = kwd[ 'id' ]
tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
@@ -1474,16 +1472,16 @@
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision )
- repo_info_dict = create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1513,21 +1511,21 @@
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
status = 'warning'
if metadata and 'readme_files' in metadata:
- url = url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
- containers_dict = build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies,
+ tool_dependencies=tool_dependencies )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle repository dependencies check box.
@@ -1558,12 +1556,12 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = GALAXY_ADMIN_TOOL_SHED_CONTROLLER
- message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ kwd[ 'CONTROLLER' ] = suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER
+ message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = build_repository_ids_select_field( trans, GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans, suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
@@ -1573,20 +1571,20 @@
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
repository = get_installed_tool_shed_repository( trans, id )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
update_in_shed_tool_config( trans.app, repository )
@@ -1645,10 +1643,10 @@
def set_tool_versions( self, trans, **kwd ):
# Get the tool_versions from the tool shed for each tool in the installed change set.
repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url,
- 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository.name, repository.owner, repository.changeset_revision ) )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1757,21 +1755,21 @@
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
else:
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
- repo = hg.repository( get_configured_ui(), path=repo_files_dir )
+ repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
pull_repository( repo, repository_clone_url, latest_ctx_rev )
- update_repository( repo, latest_ctx_rev )
+ suc.update_repository( repo, latest_ctx_rev )
tool_shed = clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=True,
- persist=True )
+ metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=True,
+ persist=True )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
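The generate_tool_path() docstring in this changeset defines the on-disk layout, <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>, which keeps same-named repositories from different sheds or revisions in separate directories. A minimal sketch of that transformation, assuming a clone URL of the form shown in the docstring; the helper bodies here are illustrative, not Galaxy's actual shed_util_common code:

    def clean_repository_clone_url( repository_clone_url ):
        # Strip the protocol and any username@ prefix, e.g.
        # http://test@bx.psu.edu:9009/repos/test/filter -> bx.psu.edu:9009/repos/test/filter
        url = repository_clone_url.split( '://' )[ -1 ]
        host, _, rest = url.partition( '/' )
        return '%s/%s' % ( host.split( '@' )[ -1 ], rest )

    def generate_tool_path( repository_clone_url, changeset_revision ):
        tmp_url = clean_repository_clone_url( repository_clone_url )
        tool_shed_url, repo_path = tmp_url.split( 'repos' )
        parts = [ tool_shed_url, 'repos', repo_path, changeset_revision ]
        return '/'.join( part.strip( '/' ) for part in parts )

    # generate_tool_path( 'http://test@bx.psu.edu:9009/repos/test/filter', '94bf007' )
    # -> 'bx.psu.edu:9009/repos/test/filter/94bf007'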
commit/galaxy-central: inithello: Added functional tests to verify repository installation on the Galaxy side.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ae60aaaf6a13/
changeset: ae60aaaf6a13
user: inithello
date: 2012-12-12 22:56:14
summary: Added functional tests to verify repository installation on the Galaxy side.
affected #: 6 files
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -1,6 +1,8 @@
+import galaxy.model
import galaxy.webapps.community.model as model
from galaxy.model.orm import *
from galaxy.webapps.community.model.mapping import context as sa_session
+from galaxy.model.mapping import context as ga_session
def delete_obj( obj ):
sa_session.delete( obj )
@@ -12,6 +14,10 @@
def flush( obj ):
sa_session.add( obj )
sa_session.flush()
+def get_category_by_name( name ):
+ return sa_session.query( model.Category ) \
+ .filter( model.Category.table.c.name == name ) \
+ .first()
def get_default_user_permissions_by_role( role ):
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.role_id == role.id ) \
@@ -20,6 +26,12 @@
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.user_id==user.id ) \
.all()
+def get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision ):
+ return ga_session.query( galaxy.model.ToolShedRepository ) \
+ .filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
+ galaxy.model.ToolShedRepository.table.c.owner == owner,
+ galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
def get_private_role( user ):
for role in user.all_roles():
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
@@ -39,6 +51,17 @@
sa_session.flush()
def refresh( obj ):
sa_session.refresh( obj )
+def ga_refresh( obj ):
+ ga_session.refresh( obj )
+def get_galaxy_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_galaxy_user( email ):
+ return ga_session.query( galaxy.model.User ) \
+ .filter( galaxy.model.User.table.c.email==email ) \
+ .first()
def get_repository_by_name_and_owner( name, owner_username ):
owner = get_user_by_name( owner_username )
repository = sa_session.query( model.Repository ) \
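The helpers added to test_db_util.py above query two separate SQLAlchemy sessions: sa_session bound to the tool shed model and ga_session bound to the Galaxy model, and they look repositories up by the natural key (name, owner, changeset_revision) rather than a surrogate id. A hedged sketch of that lookup pattern; the model and session objects are stand-ins following the diff:

    from sqlalchemy import and_

    def get_repository_by_natural_key( session, model, name, owner, changeset_revision ):
        # .first() returns None when no row matches, so test code can assert on
        # the result instead of handling NoResultFound.
        return session.query( model.ToolShedRepository ) \
                      .filter( and_( model.ToolShedRepository.table.c.name == name,
                                     model.ToolShedRepository.table.c.owner == owner,
                                     model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
                      .first()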
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,7 +1,9 @@
import galaxy.webapps.community.util.hgweb_config
-import common, string, os
+import galaxy.model as galaxy_model
+import common, string, os, re, time
from base.twilltestcase import tc, from_json_string, TwillTestCase, security
-from test_db_util import get_repository_metadata_by_repository_id_changeset_revision
+from test_db_util import get_repository_by_name_and_owner, get_repository_metadata_by_repository_id_changeset_revision, \
+ get_galaxy_repository_by_name_owner_changeset_revision
from galaxy import eggs
eggs.require('mercurial')
@@ -19,14 +21,25 @@
self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
+ self.galaxy_host = os.environ.get( 'GALAXY_TEST_HOST' )
+ self.galaxy_port = os.environ.get( 'GALAXY_TEST_PORT' )
+ self.galaxy_url = "http://%s:%s" % ( self.galaxy_host, self.galaxy_port )
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.shed_tools_dict = {}
self.home()
+ def browse_category( self, category, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_valid_categories?sort=name&operation=valid_repositories_by_category&id=%s' % \
+ self.security.encode_id( category.id )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def browse_tool_shed( self, url, strings_displayed=[], strings_not_displayed=[] ):
+ self.visit_galaxy_url( '/admin_toolshed/browse_tool_shed?tool_shed_url=%s' % url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def check_for_strings( self, strings_displayed=[], strings_not_displayed=[] ):
if strings_displayed:
for string in strings_displayed:
@@ -122,6 +135,36 @@
tc.fv( "1", "category_id", "+%s" % category )
tc.submit( "create_repository_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+    def create_user_in_galaxy( self, cntrller='user', email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+ self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
+ tc.fv( '1', 'email', email )
+ tc.fv( '1', 'redirect', redirect )
+ tc.fv( '1', 'password', password )
+ tc.fv( '1', 'confirm', password )
+ tc.fv( '1', 'username', username )
+ tc.submit( 'create_user_button' )
+ previously_created = False
+ username_taken = False
+ invalid_username = False
+ try:
+ self.check_page_for_string( "Created new user account" )
+ except:
+ try:
+ # May have created the account in a previous test run...
+ self.check_page_for_string( "User with that email already exists" )
+ previously_created = True
+ except:
+ try:
+ self.check_page_for_string( 'Public name is taken; please choose another' )
+ username_taken = True
+ except:
+ try:
+                        # Note that we're only checking whether the username is shorter than 4 characters here...
+ self.check_page_for_string( 'Public name must be at least 4 characters in length' )
+ invalid_username = True
+ except:
+ pass
+ return previously_created, username_taken, invalid_username
def delete_files_from_repository( self, repository, filenames=[], strings_displayed=[ 'were deleted from the repository' ], strings_not_displayed=[] ):
files_to_delete = []
basepath = self.get_repo_path( repository )
@@ -212,6 +255,21 @@
else:
string = string.replace( character, replacement )
return string
+    def galaxy_login( self, email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+ previously_created, username_taken, invalid_username = \
+ self.create_user_in_galaxy( email=email, password=password, username=username, redirect=redirect )
+ if previously_created:
+ self.visit_galaxy_url( "/user/login?use_panels=False" )
+ tc.fv( '1', 'email', email )
+ tc.fv( '1', 'redirect', redirect )
+ tc.fv( '1', 'password', password )
+ tc.submit( 'login_button' )
+ def galaxy_logout( self ):
+ self.home()
+ self.visit_galaxy_url( "/user/logout" )
+ self.check_page_for_string( "You have been logged out" )
+ self.home()
+
def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
@@ -239,9 +297,6 @@
return os.path.abspath( os.path.join( filepath, filename ) )
else:
return os.path.abspath( os.path.join( self.file_dir, filename ) )
- def get_latest_repository_metadata_for_repository( self, repository ):
- # TODO: This will not work as expected. Fix it.
- return repository.metadata_revisions[ 0 ]
def get_repo_path( self, repository ):
# An entry in the hgweb.config file looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
@@ -304,6 +359,29 @@
tc.fv( "3", "allow_push", '+%s' % username )
tc.submit( 'user_access_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def install_repository( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
+ repository = get_repository_by_name_and_owner( name, owner )
+ repository_id = self.security.encode_id( repository.id )
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
+ url = '/repository/install_repositories_by_revision?changeset_revisions=%s&repository_ids=%s&galaxy_url=%s' % \
+ ( changeset_revision, repository_id, self.galaxy_url )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ tc.submit( 'select_tool_panel_section_button' )
+ html = self.last_page()
+        # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
+        # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
+        # group(2), and the reinstalling flag in group(3); these are then passed to the manage_repositories method in the
+        # Galaxy admin_toolshed controller.
+ install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
+ iri_ids = install_parameters.group(1)
+ encoded_kwd = install_parameters.group(2)
+ reinstalling = install_parameters.group(3)
+ url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+ ( iri_ids, encoded_kwd, reinstalling )
+ self.visit_galaxy_url( url )
+ self.wait_for_repository_installation( repository )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
@@ -314,6 +392,13 @@
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
+ repository = get_repository_by_name_and_owner( name, owner )
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
+ self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
+ ( self.security.encode_id( repository.id ), changeset_revision ) )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -361,3 +446,18 @@
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def visit_galaxy_url( self, url ):
+ url = '%s%s' % ( self.galaxy_url, url )
+ self.visit_url( url )
+ def wait_for_repository_installation( self, repository ):
+ final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
+ galaxy_model.ToolShedRepository.installation_status.INSTALLED,
+ galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
+ galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]
+ repository_name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ galaxy_repository = get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
+ while galaxy_repository.status not in final_states:
+ ga_refresh( galaxy_repository )
+ time.sleep( 1 )
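install_repository() and wait_for_repository_installation() above capture the whole Galaxy-side flow: scrape the repository IDs, encoded_kwd, and reinstalling flag out of the initiate_repository_installation(...) call in the returned page, hand them to the admin_toolshed controller, then poll the repository status until a final state is reached. A condensed sketch of the two pieces; fetch_status and final_states are illustrative stand-ins for the framework's refresh-and-check logic:

    import re, time

    def parse_install_parameters( html ):
        # Mirrors the regex in install_repository() above: group(1) holds the
        # tool shed repository IDs, group(2) the encoded_kwd, group(3) the
        # reinstalling flag.
        match = re.search( r'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
        if match is None:
            raise AssertionError( 'Page does not initiate a repository installation' )
        return match.group( 1 ), match.group( 2 ), match.group( 3 )

    def wait_until_final_state( fetch_status, final_states, poll_interval=1 ):
        # Installation is asynchronous, so poll (refreshing the record each
        # time) until an ERROR/INSTALLED/UNINSTALLED/DEACTIVATED state is seen.
        while fetch_status() not in final_states:
            time.sleep( poll_interval )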
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-datatypes_repository_name = 'emboss_datatypes'
+datatypes_repository_name = 'emboss_datatypes_0030'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
@@ -29,7 +29,7 @@
"""Create a category for this test suite"""
self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
def test_0010_create_repositories( self ):
- '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
+ '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes_0030, and emboss_0030 repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_0050_circular_n_levels.py
--- a/test/tool_shed/functional/test_0050_circular_n_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -79,8 +79,8 @@
repository_long_description=filtering_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
- self.upload_file( repository,
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( filtering_repository,
'filtering/filtering_1.1.0.tar',
strings_displayed=[],
commit_message='Uploaded filtering.tar.' )
@@ -89,7 +89,7 @@
self.generate_repository_dependency_xml( [ emboss_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Filtering depends on the emboss repository.' )
- self.upload_file( repository,
+ self.upload_file( filtering_repository,
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency on emboss.' )
@@ -109,6 +109,13 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+ self.generate_repository_dependency_xml( [ filtering_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Emboss depends on the filtering repository.' )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering.' )
previous_tip = self.get_repository_tip( repository )
self.generate_repository_dependency_xml( [ emboss_datatypes_repository, emboss_repository, filtering_repository, repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -119,11 +126,24 @@
commit_message='Uploaded dependency on filtering.' )
self.display_manage_repository_page( repository, strings_not_displayed=[ previous_tip ] )
def test_0030_verify_repository_dependencies( self ):
- '''Verify that the generated dependency circle does not cause an infinite loop.'''
+ '''Verify that the generated dependency circle does not cause an infinite loop.
+
+ Expected structure:
+
+ id: 2 key: http://localhost:8634__ESEP__freebayes_0050__ESEP__user1__ESEP__2e73d8e1b59d
+ ['http://localhost:8634', 'emboss_datatypes_0050', 'user1', '596029c334b1']
+ ['http://localhost:8634', 'emboss_0050', 'user1', '9f1503046640']
+ id: 3 key: http://localhost:8634__ESEP__filtering_0050__ESEP__user1__ESEP__eefdd8bc0db9
+ ['http://localhost:8634', 'emboss_0050', 'user1', '9f1503046640']
+ id: 4 key: http://localhost:8634__ESEP__emboss_0050__ESEP__user1__ESEP__9f1503046640
+ ['http://localhost:8634', 'emboss_datatypes_0050', 'user1', '596029c334b1']
+ '''
emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
+ for changeset_revision in self.get_repository_metadata_revisions( emboss_repository ):
+ self.check_repository_dependency( freebayes_repository, emboss_repository, changeset_revision )
self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional/test_1000_install_filtering_repository.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1000_install_filtering_repository.py
@@ -0,0 +1,31 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_galaxy_user, get_galaxy_private_role, get_category_by_name
+
+class BasicToolShedFeatures( ShedTwillTestCase ):
+ '''Test installing a basic repository.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_galaxy_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_galaxy_private_role( test_user_1 )
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_galaxy_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_sheds( self ):
+ """Browse the available tool sheds in this Galaxy instance."""
+ self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
+ self.check_page_for_string( 'Embedded tool shed for functional tests' )
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
+ def test_0010_browse_test_0000_category( self ):
+ '''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
+ category = get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+ self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
+ def test_0015_preview_filtering_repository( self ):
+ '''Load the preview page for the filtering_0000 repository in the tool shed.'''
+ self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
+ def test_0020_install_filtering_repository( self ):
+ self.install_repository( 'filtering_0000', common.test_user_1_name )
diff -r dad76985fc571feed65cc63de37b2685312348e3 -r ae60aaaf6a139e88b2849abf97fd47af711341fd test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import os, sys, shutil, tempfile, re
+import os, sys, shutil, tempfile, re, string
# Assume we are run from the galaxy root directory, add lib to the python path
cwd = os.getcwd()
@@ -33,9 +33,14 @@
import sys, threading, random
import httplib, socket
from paste import httpserver
+# This is for the tool shed application.
import galaxy.webapps.community.app
-from galaxy.webapps.community.app import UniverseApplication
-from galaxy.webapps.community import buildapp
+from galaxy.webapps.community.app import UniverseApplication as ToolshedUniverseApplication
+from galaxy.webapps.community import buildapp as toolshedbuildapp
+# This is for the galaxy application.
+import galaxy.app
+from galaxy.app import UniverseApplication as GalaxyUniverseApplication
+from galaxy.web import buildapp as galaxybuildapp
import nose.core
import nose.config
@@ -46,8 +51,22 @@
default_tool_shed_test_host = "localhost"
default_tool_shed_test_port_min = 8000
-default_tool_shed_test_port_max = 9999
+default_tool_shed_test_port_max = 8999
default_tool_shed_locales = 'en'
+default_galaxy_test_port_min = 9000
+default_galaxy_test_port_max = 9999
+default_galaxy_test_host = 'localhost'
+
+tool_sheds_conf_xml_template = '''<?xml version="1.0"?>
+<tool_sheds>
+ <tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
+</tool_sheds>
+'''
+
+shed_tool_conf_xml_template = '''<?xml version="1.0"?>
+<toolbox tool_path="${shed_tool_path}">
+</toolbox>
+'''
def run_tests( test_config ):
loader = nose.loader.TestLoader( config=test_config )
@@ -67,6 +86,8 @@
# ---- Configuration ------------------------------------------------------
tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
+ galaxy_test_host = os.environ.get( 'GALAXY_TEST_HOST', default_galaxy_test_host )
+ galaxy_test_port = os.environ.get( 'GALAXY_TEST_PORT', None )
tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
@@ -85,18 +106,33 @@
if not os.path.isdir( tool_shed_test_tmp_dir ):
os.mkdir( tool_shed_test_tmp_dir )
tool_shed_test_proxy_port = None
+ galaxy_test_proxy_port = None
if 'TOOL_SHED_TEST_DBPATH' in os.environ:
- db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+ shed_db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
else:
tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- db_path = os.path.join( tempdir, 'database' )
- file_path = os.path.join( db_path, 'files' )
+ shed_db_path = os.path.join( tempdir, 'database' )
+ galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_tool_conf.xml' ) )
+ galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOLS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
+ if 'GALAXY_TEST_DBPATH' in os.environ:
+ galaxy_db_path = os.environ[ 'GALAXY_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_db_path = os.path.join( tempdir, 'database' )
+ shed_file_path = os.path.join( shed_db_path, 'files' )
+ galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_shed_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
if 'TOOL_SHED_TEST_DBURI' in os.environ:
- database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+ toolshed_database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
else:
- database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+ toolshed_database_connection = 'sqlite:///' + os.path.join( shed_db_path, 'community_test.sqlite' )
+ if 'GALAXY_TEST_DBURI' in os.environ:
+ galaxy_database_connection = os.environ[ 'GALAXY_TEST_DBURI' ]
+ else:
+ galaxy_database_connection = 'sqlite:///' + os.path.join( galaxy_db_path, 'universe_test.sqlite' )
kwargs = {}
for dir in [ tool_shed_test_tmp_dir ]:
try:
@@ -104,30 +140,31 @@
except OSError:
pass
- print "Database connection:", database_connection
+ print "Tool shed database connection:", toolshed_database_connection
+ print "Galaxy database connection:", galaxy_database_connection
hgweb_config_dir = hgweb_config_file_path
os.environ[ 'TEST_HG_WEB_CONFIG_DIR' ] = hgweb_config_dir
print "Directory location for hgweb.config:", hgweb_config_dir
- # ---- Build Application --------------------------------------------------
- app = None
+ # ---- Build Tool Shed Application --------------------------------------------------
+ toolshedapp = None
global_conf = { '__file__' : 'community_wsgi.ini.sample' }
- if not database_connection.startswith( 'sqlite://' ):
- kwargs[ 'database_engine_option_max_overflow' ] = '20'
+# if not toolshed_database_connection.startswith( 'sqlite://' ):
+# kwargs[ 'database_engine_option_max_overflow' ] = '20'
if tool_dependency_dir is not None:
kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
if use_distributed_object_store:
kwargs[ 'object_store' ] = 'distributed'
kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
- app = UniverseApplication( job_queue_workers = 5,
+ toolshedapp = ToolshedUniverseApplication( job_queue_workers = 5,
id_secret = 'changethisinproductiontoo',
template_path = 'templates',
- database_connection = database_connection,
+ database_connection = toolshed_database_connection,
database_engine_option_pool_size = '10',
- file_path = file_path,
+ file_path = shed_file_path,
new_file_path = new_repos_path,
tool_path=tool_path,
datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
@@ -144,23 +181,23 @@
hgweb_config_dir = hgweb_config_dir,
**kwargs )
- log.info( "Embedded Universe application started" )
+ log.info( "Embedded Toolshed application started" )
- # ---- Run webserver ------------------------------------------------------
- server = None
- webapp = buildapp.app_factory( dict( database_file=database_connection ),
- use_translogger=False,
- static_enabled=False,
- app=app )
+ # ---- Run tool shed webserver ------------------------------------------------------
+ tool_shed_server = None
+ toolshedwebapp = toolshedbuildapp.app_factory( dict( database_file=toolshed_database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=toolshedapp )
if tool_shed_test_port is not None:
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ tool_shed_server = httpserver.serve( toolshedwebapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
else:
random.seed()
for i in range( 0, 9 ):
try:
tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ tool_shed_server = httpserver.serve( toolshedwebapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
break
except socket.error, e:
if e[0] == 98:
@@ -172,7 +209,7 @@
os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
else:
os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
- t = threading.Thread( target=server.serve_forever )
+ t = threading.Thread( target=tool_shed_server.serve_forever )
t.start()
# Test if the server is up
for i in range( 10 ):
@@ -185,6 +222,90 @@
else:
raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
log.info( "Embedded web server started" )
+
+ # ---- Optionally start up a Galaxy instance ------------------------------------------------------
+ if 'TEST_TOOL_SHED_START_GALAXY' in os.environ:
+ # Generate the shed_tool_conf.xml and tool_sheds_conf.xml files
+ tool_sheds_conf_template_parser = string.Template( tool_sheds_conf_xml_template )
+ tool_sheds_conf_xml = tool_sheds_conf_template_parser.safe_substitute( shed_url=tool_shed_test_host, shed_port=tool_shed_test_port )
+ file( galaxy_tool_sheds_conf_file, 'w' ).write( tool_sheds_conf_xml )
+ shed_tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
+ shed_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
+ file( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+
+ # ---- Build Galaxy Application --------------------------------------------------
+ galaxy_global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
+ if not galaxy_database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ galaxyapp = GalaxyUniverseApplication( job_queue_workers = 5,
+ id_secret = 'changethisinproductiontoo',
+ template_path = "templates",
+ database_connection = galaxy_database_connection,
+ database_engine_option_pool_size = '10',
+ file_path = galaxy_file_path,
+ tool_path = tool_path,
+ tool_dependency_dir=galaxy_tool_dependency_dir,
+ shed_tool_path=galaxy_shed_tool_path,
+ update_integrated_tool_panel = False,
+ tool_config_file = galaxy_shed_tool_conf_file,
+ tool_sheds_config_file = galaxy_tool_sheds_conf_file,
+ datatype_converters_config_file = "datatype_converters_conf.xml.sample",
+ tool_parse_help = False,
+ tool_data_table_config_path = tool_data_table_config_path,
+ shed_tool_data_table_config = shed_tool_data_table_config,
+ log_destination = "stdout",
+ use_heartbeat = False,
+ allow_user_creation = True,
+ allow_user_deletion = True,
+                                               admin_users = 'test@bx.psu.edu',
+ allow_library_path_paste = True,
+ global_conf = global_conf,
+ running_functional_tests=True,
+ **kwargs )
+
+ log.info( "Embedded Galaxy application started" )
+
+ # ---- Run galaxy webserver ------------------------------------------------------
+ galaxy_server = None
+ galaxywebapp = galaxybuildapp.app_factory( dict( database_file=galaxy_database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=galaxyapp )
+
+ if galaxy_test_port is not None:
+ galaxy_server = httpserver.serve( galaxywebapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ galaxy_test_port = str( random.randint( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % galaxy_test_port )
+ galaxy_server = httpserver.serve( galaxywebapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
+ else:
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % \
+ ( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ if galaxy_test_proxy_port:
+ os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_proxy_port
+ else:
+ os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_port
+ t = threading.Thread( target=galaxy_server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( galaxy_test_host, galaxy_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ log.info( "Embedded galaxy web server started" )
# We don't add the tests to the path until everything is up and running
new_path = [ os.path.join( cwd, 'test' ) ]
new_path.extend( sys.path[1:] )
@@ -194,10 +315,15 @@
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_proxy_port ) )
else:
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
+ if galaxy_test_proxy_port:
+ log.info( "Galaxy tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_proxy_port ) )
+ else:
+ log.info( "Galaxy tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
success = False
try:
# Pass in through script set env, will leave a copy of ALL test validate files.
os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
+ os.environ[ 'GALAXY_TEST_HOST' ] = galaxy_test_host
if tool_shed_test_file_dir:
os.environ[ 'TOOL_SHED_TEST_FILE_DIR' ] = tool_shed_test_file_dir
test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
@@ -210,16 +336,26 @@
log.info( "Shutting down" )
# ---- Tear down -----------------------------------------------------------
- if server:
+ if tool_shed_server:
log.info( "Shutting down embedded web server" )
- server.server_close()
- server = None
+ tool_shed_server.server_close()
+ tool_shed_server = None
log.info( "Embedded web server stopped" )
- if app:
- log.info( "Shutting down app" )
- app.shutdown()
- app = None
- log.info( "Embedded Universe application stopped" )
+ if toolshedapp:
+ log.info( "Shutting down tool shed app" )
+ toolshedapp.shutdown()
+ toolshedapp = None
+ log.info( "Embedded tool shed application stopped" )
+ if galaxy_server:
+ log.info( "Shutting down galaxy web server" )
+ galaxy_server.server_close()
+ galaxy_server = None
+ log.info( "Embedded galaxy server stopped" )
+ if galaxyapp:
+ log.info( "Shutting down galaxy app" )
+ galaxyapp.shutdown()
+ galaxyapp = None
+ log.info( "Embedded galaxy application stopped" )
if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
try:
for dir in [ tool_shed_test_tmp_dir ]:
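The TEST_TOOL_SHED_START_GALAXY branch above renders tool_sheds_conf.xml and shed_tool_conf.xml from the string.Template texts defined earlier in the file, pointing the embedded Galaxy instance at the embedded tool shed. A small sketch of that rendering step, with an illustrative writer helper; safe_substitute leaves any unresolved ${placeholder} intact rather than raising:

    import string

    tool_sheds_conf_xml_template = '''<?xml version="1.0"?>
    <tool_sheds>
        <tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
    </tool_sheds>
    '''

    def write_config( template, path, **values ):
        # Hypothetical helper; the harness itself writes via file( path, 'w' ).
        rendered = string.Template( template ).safe_substitute( **values )
        open( path, 'w' ).write( rendered )

    write_config( tool_sheds_conf_xml_template, 'test_sheds_conf.xml',
                  shed_url='localhost', shed_port='8634' )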
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
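Both embedded servers in this changeset are started the same way: bind to an explicit port if one is configured, otherwise probe random ports in the configured range (retrying on errno 98, address already in use), then poll GET / directly against the app until it answers 200 OK. A sketch of the pattern in the harness's Python 2 idiom; serve here is a stand-in for paste's httpserver.serve:

    import random, socket, time, httplib

    def serve_on_random_port( serve, host, port_min, port_max, attempts=10 ):
        random.seed()
        for i in range( attempts ):
            port = str( random.randint( port_min, port_max ) )
            try:
                # serve() is assumed to bind immediately and raise socket.error
                # with errno 98 when the port is already taken.
                return serve( host=host, port=port ), port
            except socket.error, e:
                if e[0] == 98:
                    continue
                raise
        raise Exception( "Unable to open a port between %s and %s" % ( port_min, port_max ) )

    def wait_until_up( host, port, tries=10 ):
        for i in range( tries ):
            # Test the app directly, not the proxy.
            conn = httplib.HTTPConnection( host, port )
            conn.request( "GET", "/" )
            if conn.getresponse().status == 200:
                return
            time.sleep( 0.1 )
        raise Exception( "Test HTTP server did not return '200 OK' after %d tries" % tries )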
commit/galaxy-central: greg: Fix for rendering the repository dependencies container.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dad76985fc57/
changeset: dad76985fc57
user: greg
date: 2012-12-12 20:58:13
summary: Fix for rendering the repository dependencies container.
affected #: 1 file
diff -r 3301ed8fea42b1067f88138573b7d716c3b631e5 -r dad76985fc571feed65cc63de37b2685312348e3 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -214,6 +214,7 @@
del repository_dependencies[ 'description' ]
repository_dependencies_folder, folder_id, repository_dependency_id = \
populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
+ repository_dependencies_folder = prune_repository_dependencies( repository_dependencies_folder )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
@@ -346,6 +347,10 @@
workflows_root_folder = None
return folder_id, workflows_root_folder
def cast_empty_repository_dependency_folders( folder, repository_dependency_id ):
+ """
+    Change any empty folders contained within the repository dependencies container into a repository dependency, since such a folder has no repository
+    dependencies of its own. This method is not used (and may not be needed), but here it is just in case.
+ """
if not folder.folders and not folder.repository_dependencies:
repository_dependency_id += 1
repository_dependency = folder.to_repository_dependency( repository_dependency_id )
@@ -439,4 +444,20 @@
print ' %s%s' % ( pad_str, repository_dependency.listify )
for sub_folder in folder.folders:
print_folders( pad+5, sub_folder )
+def prune_repository_dependencies( folder ):
+ """
+    Since the object used to generate a repository dependencies container is a dictionary and not an odict() (it must be JSON-serializable), the
+    order in which the dictionary is processed to create the container sometimes results in repository dependency entries in a folder that also
+    includes the repository dependency as a sub-folder (if the repository dependency has its own repository dependency). This method will remove
+    all repository dependencies from folder that are also sub-folders of folder.
+ """
+ repository_dependencies = [ rd for rd in folder.repository_dependencies ]
+ for repository_dependency in repository_dependencies:
+ listified_repository_dependency = repository_dependency.listify
+ if is_subfolder_of( folder, listified_repository_dependency ):
+ repository_dependencies.remove( repository_dependency )
+ folder.repository_dependencies = repository_dependencies
+ for sub_folder in folder.folders:
+ prune_repository_dependencies( sub_folder )
+ return folder
\ No newline at end of file
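The pruning pass described in the docstring above walks the container depth-first and drops any leaf dependency that also exists as a sub-folder at the same level. A minimal runnable sketch of that idea, using stand-in classes (and a made-up STRSEP value) rather than Galaxy's real container_util types:

class RepositoryDependency( object ):
    def __init__( self, toolshed, name, owner, changeset_revision ):
        self.listify = [ toolshed, name, owner, changeset_revision ]

class Folder( object ):
    def __init__( self, key ):
        self.key = key
        self.folders = []
        self.repository_dependencies = []

STRSEP = '__SEP__'  # assumption: stand-in for container_util's real delimiter

def key_for( listified ):
    return STRSEP.join( listified )

def is_subfolder_of( folder, listified_repository_dependency ):
    # A leaf duplicates a branch when its key matches a sub-folder's key.
    key = key_for( listified_repository_dependency )
    return any( sub.key == key for sub in folder.folders )

def prune_repository_dependencies( folder ):
    # Drop leaf entries that also appear as sub-folders, then recurse
    # into every branch.
    folder.repository_dependencies = [ rd for rd in folder.repository_dependencies
                                       if not is_subfolder_of( folder, rd.listify ) ]
    for sub_folder in folder.folders:
        prune_repository_dependencies( sub_folder )
    return folder

root = Folder( 'root' )
dup = RepositoryDependency( 'shed', 'emboss', 'devteam', 'abc123' )
root.repository_dependencies.append( dup )
root.folders.append( Folder( key_for( dup.listify ) ) )
prune_repository_dependencies( root )
assert not root.repository_dependencies  # the duplicate leaf is gone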
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fixes for installing tool shed repositories.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3301ed8fea42/
changeset: 3301ed8fea42
user: greg
date: 2012-12-12 20:35:12
summary: Fixes for installing tool shed repositories.
affected #: 8 files
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -636,9 +636,6 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
return tool_path, relative_install_dir
return None, None
-def get_tool_shed_from_clone_url( repository_clone_url ):
- tmp_url = clean_repository_clone_url( repository_clone_url )
- return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
@@ -693,18 +690,6 @@
changeset_revision = None
ctx_rev = None
return changeset_revision, ctx_rev
-def get_url_from_repository_tool_shed( app, repository ):
- """
- The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
- something like: http://toolshed.g2.bx.psu.edu/.
- """
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( repository.tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
- return None
def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,4 +1,4 @@
-import os, shutil, tempfile, logging, string, threading
+import os, shutil, tempfile, logging, string, threading, urllib2
from galaxy import util
from galaxy.tools import parameters
from galaxy.util import inflector
@@ -91,12 +91,12 @@
folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
if repository_dependencies:
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
- repository_name=repository_name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
if tool_dependencies:
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
@@ -526,42 +526,44 @@
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
return repository_metadata
-def create_repo_info_dict( trans, repo, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
- repository=None, repository_metadata=None ):
+def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
+ repository_metadata=None, metadata=None, repository_dependencies=None ):
"""
- Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also contain
- the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies. This method is called during the
- tool shed repository installation process from Galaxy. In this case both the received repository and repository_metadata will be objects, but repository_name
- sill be None. This method is also called when a tool shed repository that was uninstalled from a Galaxy instance is being re-installed. In this case, both
- repository and repository_metadata will be None, but repository_name will have a value.
+ Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
+ contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
+
+ This method is called from Galaxy from two places:
+ 1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
+ repository and repository_metadata will be objects.
+ 2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
+ repository_metadata will be None, but metadata will be the tool_shed_repository metadata on the Galaxy side, and the repository_dependencies will
+ be an object previously retrieved from the tool shed.
"""
repo_info_dict = {}
- if repository is None and repository_metadata is None:
- # The repository associated with the received repository_clone_url is being re-installed into a Galaxy instance, so we need to retrieve the
- # appropriate repository from the tool shed using the received information.
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- # Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
- # Cast unicode to string.
- repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
- str( repository_clone_url ),
- str( changeset_revision ),
- str( ctx_rev ),
- str( repository_owner ),
- repository_dependencies,
- metadata.get( 'tool_dependencies', None ) )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ # Get a dictionary of all repositories upon which the contents of the received repository depends.
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
+ # Cast unicode to string.
+ repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
+ str( repository_clone_url ),
+ str( changeset_revision ),
+ str( ctx_rev ),
+ str( repository_owner ),
+ repository_dependencies,
+ metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
@@ -1189,12 +1191,18 @@
return valid_filenames
def get_repository_by_name_and_owner( trans, name, owner ):
"""Get a repository from the database via name and owner"""
+ if trans.webapp.name == 'galaxy':
+ return trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
+ trans.model.ToolShedRepository.table.c.owner == owner ) ) \
+ .first()
+ # We're in the tool shed.
user = get_user_by_username( trans, owner )
return trans.sa_session.query( trans.model.Repository ) \
.filter( and_( trans.model.Repository.table.c.name == name,
trans.model.Repository.table.c.user_id == user.id ) ) \
.first()
-def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url,
+def get_repository_dependencies_for_changeset_revision( trans, repository, repository_metadata, toolshed_base_url,
key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
handled_key_rd_dicts=None, circular_repository_dependencies=None ):
"""
@@ -1291,7 +1299,7 @@
"""Get a repository on the tool shed side from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
- """Get metadata for a specified repository change set from the database"""
+ """Get metadata for a specified repository change set from the database."""
# Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
# created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
# records are removed.
@@ -1357,6 +1365,9 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_tool_shed_from_clone_url( repository_clone_url ):
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
updated_key_rd_dicts = []
for key_rd_dict in key_rd_dicts:
@@ -1387,6 +1398,18 @@
# We have the updated changeset revision.
updated_key_rd_dicts.append( new_key_rd_dict )
return updated_key_rd_dicts
+def get_url_from_repository_tool_shed( app, repository ):
+ """
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
+ something like: http://toolshed.g2.bx.psu.edu/.
+ """
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( repository.tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
+ return None
def get_user_by_username( trans, username ):
"""Get a user from the database by username"""
return trans.sa_session.query( trans.model.User ) \
@@ -1412,7 +1435,7 @@
new_key_rd_dict[ current_repository_key ] = rd_copy
current_repository_key_rd_dicts.append( new_key_rd_dict )
if current_repository_key_rd_dicts:
- toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
handle_key_rd_dicts_for_repository( trans,
current_repository_key,
current_repository_key_rd_dicts,
@@ -1420,7 +1443,6 @@
handled_key_rd_dicts,
circular_repository_dependencies )
return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
repository=required_repository,
repository_metadata=required_repository_metadata,
toolshed_base_url=toolshed,
@@ -1456,8 +1478,6 @@
trans.security.encode_id( required_repository.id ),
changeset_revision )
if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
# The required_repository_metadata changeset_revision is installable.
required_metadata = required_repository_metadata.metadata
if required_metadata:
@@ -1475,12 +1495,12 @@
error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
log.debug( error_message )
- return toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
+ return toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
def handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ):
next_repository_key_rd_dict = key_rd_dicts_to_be_processed.pop( 0 )
next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
- toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
handle_key_rd_dicts_for_repository( trans,
next_repository_key,
next_repository_key_rd_dicts,
@@ -1488,7 +1508,6 @@
handled_key_rd_dicts,
circular_repository_dependencies )
return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
repository=required_repository,
repository_metadata=required_repository_metadata,
toolshed_base_url=toolshed,
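A recurring pattern in the shed_util_common changes above is branching on trans.webapp.name so a single helper serves both webapps: Galaxy queries its ToolShedRepository table directly, while the tool shed resolves the owner to a user record first. A schematic, dictionary-backed sketch of that dispatch (not Galaxy's actual models or SQLAlchemy queries):

def get_repository_by_name_and_owner( webapp_name, name, owner, galaxy_repositories, shed_repositories ):
    # Galaxy's installed repositories carry a plain owner column.
    if webapp_name == 'galaxy':
        for repository in galaxy_repositories:
            if repository[ 'name' ] == name and repository[ 'owner' ] == owner:
                return repository
        return None
    # We're in the tool shed: repositories hang off a user record instead.
    for repository in shed_repositories:
        if repository[ 'name' ] == name and repository[ 'user' ][ 'username' ] == owner:
            return repository
    return None

galaxy_repositories = [ dict( name='emboss', owner='devteam' ) ]
shed_repositories = [ dict( name='emboss', user=dict( username='devteam' ) ) ]
print get_repository_by_name_and_owner( 'galaxy', 'emboss', 'devteam', galaxy_repositories, shed_repositories )
print get_repository_by_name_and_owner( 'community', 'emboss', 'devteam', galaxy_repositories, shed_repositories )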
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1243,6 +1243,58 @@
message=message,
status=status )
@web.expose
+ def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
+ """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # Default to the received changeset revision and ctx_rev.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx_rev = str( update_to_ctx.rev() )
+ latest_changeset_revision = changeset_revision
+ update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
+ if changeset_revision == repository.tip( trans.app ):
+ # If changeset_revision is the repository tip, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
+ # repository was installed. We need to find the changeset_revision to which we need to update.
+ update_to_changeset_hash = None
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ ctx = get_changectx_for_changeset( repo, changeset_hash )
+ if update_to_changeset_hash:
+ if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ # We found a RepositoryMetadata record.
+ if changeset_hash == repository.tip( trans.app ):
+ # The current ctx is the repository tip, so use it.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ latest_changeset_revision = changeset_hash
+ else:
+ update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ latest_changeset_revision = update_to_changeset_hash
+ break
+ elif not update_to_changeset_hash and changeset_hash == changeset_revision:
+ # We've found the changeset in the changelog for which we need to get the next update.
+ update_to_changeset_hash = changeset_hash
+ update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
+ update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
+ return tool_shed_encode( update_dict )
+ @web.expose
def get_ctx_rev( self, trans, **kwd ):
"""Given a repository and changeset_revision, return the correct ctx.rev() value."""
repository_name = kwd[ 'name' ]
@@ -1274,6 +1326,30 @@
return repository_metadata.metadata
return None
@web.json
+ def get_repository_dependencies( self, trans, **kwd ):
+ params = util.Params( kwd )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_id = trans.security.encode_id( repository.id )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ # Get a dictionary of all repositories upon which the contents of the received repository depends.
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
+ if repository_dependencies:
+ return tool_shed_encode( repository_dependencies )
+ return ''
+ @web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
"""
Generate a list of dictionaries, each of which contains the information about a repository that will be necessary for installing
@@ -1299,13 +1375,13 @@
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
changeset_revision=changeset_revision,
ctx_rev=str( ctx.rev() ),
repository_owner=repository.user.username,
- repository_name=None,
+ repository_name=repository.name,
repository=repository,
+ metadata=None,
repository_metadata=repository_metadata )
repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
@@ -1370,58 +1446,6 @@
if tool_version_dicts:
return to_json_string( tool_version_dicts )
return ''
- @web.expose
- def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
- """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- galaxy_url = kwd.get( 'galaxy_url', '' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
- repository = get_repository_by_name_and_owner( trans, name, owner )
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- # Default to the received changeset revision and ctx_rev.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
- ctx_rev = str( update_to_ctx.rev() )
- latest_changeset_revision = changeset_revision
- update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
- if changeset_revision == repository.tip( trans.app ):
- # If changeset_revision is the repository tip, there are no additional updates.
- return tool_shed_encode( update_dict )
- else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
- return tool_shed_encode( update_dict )
- else:
- # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was installed. We need to find the changeset_revision to which we need to update.
- update_to_changeset_hash = None
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- ctx = get_changectx_for_changeset( repo, changeset_hash )
- if update_to_changeset_hash:
- if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
- # We found a RepositoryMetadata record.
- if changeset_hash == repository.tip( trans.app ):
- # The current ctx is the repository tip, so use it.
- update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
- latest_changeset_revision = changeset_hash
- else:
- update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
- latest_changeset_revision = update_to_changeset_hash
- break
- elif not update_to_changeset_hash and changeset_hash == changeset_revision:
- # We've found the changeset in the changelog for which we need to get the next update.
- update_to_changeset_hash = changeset_hash
- update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
- update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
- return tool_shed_encode( update_dict )
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
@@ -1783,7 +1807,6 @@
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
@@ -1896,7 +1919,6 @@
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
@@ -2418,7 +2440,6 @@
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=repo,
repository=repository,
repository_metadata=repository_metadata,
toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
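The relocated get_changeset_revision_and_ctx_rev() scans the changelog in order: once it passes the installed changeset it looks for the next changeset that has a RepositoryMetadata record, taking it only when it is the repository tip. Condensed to plain lists, the decision logic reads roughly like this (the hashes and has_metadata predicate are made up for illustration):

def find_update_revision( changelog, installed, tip, has_metadata ):
    # Walk the changelog in order until the installed changeset is seen,
    # then update to the next changeset carrying a RepositoryMetadata
    # record, preferring it only when it is the repository tip.
    latest = installed
    update_to = None
    for changeset_hash in changelog:
        if update_to:
            if has_metadata( changeset_hash ):
                if changeset_hash == tip:
                    latest = changeset_hash
                else:
                    latest = update_to
                break
        elif changeset_hash == installed:
            update_to = changeset_hash
    return latest

changelog = [ 'aaa111', 'bbb222', 'ccc333' ]
print find_update_revision( changelog, 'bbb222', 'ccc333', lambda h: h in ( 'bbb222', 'ccc333' ) )  # -> ccc333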
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -561,6 +561,28 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
return get_repository_file_contents( file_path )
+ @web.expose
+ @web.require_admin
+ def get_repository_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined for the received repository
+ name, owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We
+ need it so that we can derive the tool shed from which it was installed.
+ """
+ repository = get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = url_join( tool_shed_url,
+ 'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ if len( raw_text ) > 2:
+ text = json.from_json_string( tool_shed_decode( raw_text ) )
+ log.debug( text )
+ else:
+ text = ''
+ return text
def get_versions_of_tool( self, app, guid ):
tool_version = get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -1348,17 +1370,24 @@
repo_info_dict = kwd.get( 'repo_info_dict', None )
# The repo_info_dict should be encoded.
if not repo_info_dict:
- # This should only happen if the tool_shed_repository does not include any valid tools.
+ # Entering this if block used to happen only if the tool_shed_repository did not include any valid tools. Since repository dependencies
+ # were introduced it may never happen, but we'll keep the block just in case.
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision )
repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.installed_changeset_revision,
ctx_rev=ctx_rev,
repository_owner=tool_shed_repository.owner,
repository_name=tool_shed_repository.name,
repository=None,
- repository_metadata=None )
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
repo_info_dict = tool_shed_encode( repo_info_dict )
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
@@ -1434,21 +1463,27 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- metadata = repository.metadata
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
+ repository_id = kwd[ 'id' ]
+ tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
+ metadata = tool_shed_repository.metadata
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ ctx_rev = get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision )
repo_info_dict = create_repo_info_dict( trans=trans,
- repo=repo,
repository_clone_url=repository_clone_url,
- changeset_revision=repository.installed_changeset_revision,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
ctx_rev=ctx_rev,
- repository_owner=repository.owner,
- repository_name=repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
repository=None,
- repository_metadata=None )
+ repository_metadata=None,
+ metadata=metadata,
+ repository_dependencies=repository_dependencies )
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1469,18 +1504,18 @@
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
- % ( repository.name, original_section_name )
+ % ( tool_shed_repository.name, original_section_name )
message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
message += "different section in the tool panel."
status = 'warning'
else:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % repository.name
+ message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
status = 'warning'
if metadata and 'readme_files' in metadata:
url = url_join( tool_shed_url,
'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( repository.name, repository.owner, repository.installed_changeset_revision ) )
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1508,7 +1543,7 @@
install_tool_dependencies_check_box_checked = True
install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=install_tool_dependencies_check_box_checked )
return trans.fill_template( '/admin/tool_shed_repository/reselect_tool_panel_section.mako',
- repository=repository,
+ repository=tool_shed_repository,
no_changes_check_box=no_changes_check_box,
original_section_name=original_section_name,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
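The new admin_toolshed.get_repository_dependencies() above follows Galaxy's usual shed round-trip idiom: derive the shed URL from the installed repository's record, fetch with urllib2, and treat any payload of two characters or fewer as an empty encoded dict. A stripped-down sketch of that request/decode flow, with a caller-supplied decode standing in for tool_shed_decode() plus from_json_string():

import urllib
import urllib2

def fetch_repository_dependencies( tool_shed_url, name, owner, changeset_revision, decode ):
    # decode is a stand-in for tool_shed_decode() + from_json_string().
    params = urllib.urlencode( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
    url = '%s/repository/get_repository_dependencies?%s' % ( tool_shed_url.rstrip( '/' ), params )
    response = urllib2.urlopen( url )
    try:
        raw_text = response.read()
    finally:
        response.close()
    # A payload of two characters or fewer is an empty encoded dict.
    if len( raw_text ) > 2:
        return decode( raw_text )
    return ''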
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -159,7 +159,7 @@
def __str__( self ):
return str( self.count )
- readme_files_root_folder = containers_dict[ 'readme_files' ]
+ readme_files_root_folder = containers_dict.get( 'readme_files', None )
%>
%if readme_files_root_folder:
<p/>
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -11,7 +11,7 @@
<div class="toolFormBody"><form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ), repo_info_dict=encoded_repo_info_dict )}" method="post" ><div style="clear: both"></div>
- <% readme_files_dict = containers_dict[ 'readme_files' ] %>
+ <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
%if readme_files_dict:
<div class="form-row"><table class="colored" width="100%">
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -37,7 +37,7 @@
<div class="toolFormBody"><form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='prepare_for_install', tool_shed_url=tool_shed_url, encoded_repo_info_dicts=encoded_repo_info_dicts, includes_tools=includes_tools, includes_tool_dependencies=includes_tool_dependencies )}" method="post" ><div style="clear: both"></div>
- <% readme_files_dict = containers_dict[ 'readme_files' ] %>
+ <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
%if readme_files_dict:
<div class="form-row"><table class="colored" width="100%">
diff -r c8c181a904677adf0c9c65a1151848d9d2da16fb -r 3301ed8fea42b1067f88138573b7d716c3b631e5 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -467,13 +467,13 @@
has_readme_files = metadata and 'readme_files' in metadata
has_workflows = metadata and 'workflows' in metadata
- datatypes_root_folder = containers_dict[ 'datatypes' ]
- invalid_tools_root_folder = containers_dict[ 'invalid_tools' ]
- readme_files_root_folder = containers_dict[ 'readme_files' ]
- repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
- tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
- valid_tools_root_folder = containers_dict[ 'valid_tools' ]
- workflows_root_folder = containers_dict[ 'workflows' ]
+ datatypes_root_folder = containers_dict.get( 'datatypes', None )
+ invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
+ readme_files_root_folder = containers_dict.get( 'readme_files', None )
+ repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+ valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
+ workflows_root_folder = containers_dict.get( 'workflows', None )
has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
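All of the template tweaks above make the same defensive move: containers_dict may now omit keys it previously always carried, so direct indexing would raise a KeyError during rendering, while .get() degrades to None and lets the %if guards skip the section. In plain Python terms:

containers_dict = dict( readme_files=None )  # 'valid_tools' and friends may be absent

# Old style: direct indexing raises KeyError when a container was never built.
# New style: .get() degrades to None, which the template %if guards treat as
# "nothing to render".
valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
if not valid_tools_root_folder:
    print 'no valid tools section to render'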
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Fix for displaying old-style external display applications. Fixes e.g. display at UCSC for BED in client-side history items.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c8c181a90467/
changeset: c8c181a90467
user: dan
date: 2012-12-12 20:15:25
summary: Fix for displaying old-style external display applications. Fixes e.g. display at UCSC for BED in client-side history items.
affected #: 1 file
diff -r bfbf96a1435087fe4aaea6c0e16c072777ccdabd -r c8c181a904677adf0c9c65a1151848d9d2da16fb lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -196,6 +196,7 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_apps' ] = get_display_apps( trans, hda )
+ hda_dict[ 'display_types' ] = get_display_types( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
hda_dict[ 'peek' ] = to_unicode( hda.display_peek() )
@@ -226,3 +227,22 @@
display_apps.append( dict( label=display_app.name, links=app_links ) )
return display_apps
+
+def get_display_types( trans, hda ):
+ #TODO: make more straightforward (somehow)
+ #FIXME: need to force a transition to all new-style display applications
+ display_apps = []
+
+ for display_app in hda.datatype.get_display_types():
+ app_links = []
+ target_frame, display_links = hda.datatype.get_display_links( hda, display_app, trans.app, trans.request.base )
+ for display_name, display_link in display_links:
+ app_links.append({
+ 'target' : target_frame,
+ 'href' : display_link,
+ 'text' : display_name
+ })
+ if app_links:
+ display_apps.append( dict( label=hda.datatype.get_display_label( display_app ), links=app_links ) )
+
+ return display_apps
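get_display_types() mirrors get_display_apps(): each entry pairs a display label with a list of link dicts the client-side history panel can render. A toy version showing just the data shape being built (StubDatatype is a hypothetical stand-in for a real datatype):

class StubDatatype( object ):
    def get_display_types( self ):
        return [ 'ucsc' ]
    def get_display_links( self, hda, display_app, app, base_url ):
        return '_blank', [ ( 'main', '%s/display_at/ucsc' % base_url ) ]
    def get_display_label( self, display_app ):
        return 'display at UCSC'

def build_display_types_dicts( datatype, hda=None, app=None, base_url='http://localhost:8080' ):
    display_apps = []
    for display_app in datatype.get_display_types():
        app_links = []
        target_frame, display_links = datatype.get_display_links( hda, display_app, app, base_url )
        for display_name, display_link in display_links:
            app_links.append( { 'target': target_frame,
                                'href': display_link,
                                'text': display_name } )
        if app_links:
            display_apps.append( dict( label=datatype.get_display_label( display_app ), links=app_links ) )
    return display_apps

print build_display_types_dicts( StubDatatype() )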
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fixes for generating the repository dependencies container.
by Bitbucket 12 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bfbf96a14350/
changeset: bfbf96a14350
user: greg
date: 2012-12-12 16:36:12
summary: Fixes for generating the repository dependencies container.
affected #: 1 file
diff -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 -r bfbf96a1435087fe4aaea6c0e16c072777ccdabd lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -212,39 +212,8 @@
repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
- # The current keys in repository_dependencies should all be folders.
- folder_keys = repository_dependencies.keys()
- # If repository_dependencies_folder_key is an entry in repository_dependencies, process it first.
- if repository_dependencies_folder_key in repository_dependencies:
- val = repository_dependencies[ repository_dependencies_folder_key ]
- repository_dependencies_folder, folder_id, repository_dependency_id = handle_repository_dependencies_entry( repository_dependencies_root_folder,
- repository_dependencies_folder,
- repository_dependencies_folder_key,
- folder_keys,
- folder_id,
- repository_dependency_id,
- repository_name,
- repository_owner,
- changeset_revision,
- repository_dependencies_folder_key,
- val )
- del repository_dependencies[ repository_dependencies_folder_key ]
- for key, val in repository_dependencies.items():
- repository_dependencies_folder, folder_id, repository_dependency_id = handle_repository_dependencies_entry( repository_dependencies_root_folder,
- repository_dependencies_folder,
- repository_dependencies_folder_key,
- folder_keys,
- folder_id,
- repository_dependency_id,
- repository_name,
- repository_owner,
- changeset_revision,
- key,
- val )
- # Cast empty folders to be repository dependencies.
- repository_dependencies_folder, repository_dependency_id = cast_empty_repository_dependency_folders( repository_dependencies_folder,
- repository_dependency_id )
- # Remove repository_dependencies that are also folders, and coerce empty folders into repository dependencies.
+ repository_dependencies_folder, folder_id, repository_dependency_id = \
+ populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
@@ -391,7 +360,6 @@
if key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
label = 'Repository dependencies'
else:
- #toolshed_base_url, name, owner, revision = get_components_from_key( key )
label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( repository_name, changeset_revision, repository_owner )
return label
def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision ):
@@ -417,61 +385,58 @@
repository_owner = items[ 2 ]
changeset_revision = items[ 3 ]
return toolshed_base_url, repository_name, repository_owner, changeset_revision
-def handle_repository_dependencies_entry( repository_dependencies_root_folder, repository_dependencies_folder, repository_dependencies_folder_key,
- folder_keys, folder_id, repository_dependency_id, repository_name, repository_owner, changeset_revision,
- key, val ):
- # Only create a new folder object if necessary.
- folder = get_folder( repository_dependencies_folder, key )
- if not folder:
+def handle_repository_dependencies_container_entry( repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
+ toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
+ folder = get_folder( repository_dependencies_folder, rd_key )
+ label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, repository_dependencies_folder.key )
+ if folder:
+ if rd_key not in folder_keys:
+ folder_id += 1
+ sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=folder )
+ folder.folders.append( sub_folder )
+ else:
+ sub_folder = folder
+ else:
folder_id += 1
- label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key )
- folder = Folder( id=folder_id, key=key, label=label, parent=repository_dependencies_folder )
- for repository_dependency_tup in val:
- toolshed, name, owner, changeset_revision = repository_dependency_tup
- if is_or_should_be_folder( folder_keys, toolshed, name, owner, changeset_revision ):
- check_folder_key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision )
- check_folder = get_folder( repository_dependencies_folder, check_folder_key )
- if check_folder:
- repository_dependency_id += 1
- repository_dependency = RepositoryDependency( id=repository_dependency_id,
- toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
- changeset_revision=changeset_revision )
- if not check_folder.contains_repository_dependency( repository_dependency ):
- check_folder.repository_dependencies.append( repository_dependency )
- else:
- # Create a new folder, which may be populated later.
- folder_id += 1
- label = generate_repository_dependencies_folder_label_from_key( name, owner, changeset_revision, key )
- sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=folder )
- folder.folders.append( sub_folder )
- else:
+ sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
+ repository_dependencies_folder.folders.append( sub_folder )
+ for repository_dependency in rd_value:
+ can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
+ if can_create_dependency:
+ toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
repository_dependency_id += 1
repository_dependency = RepositoryDependency( id=repository_dependency_id,
toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
changeset_revision=changeset_revision )
# Insert the repository_dependency into the folder.
- folder.repository_dependencies.append( repository_dependency )
- if not get_folder( repository_dependencies_folder, folder.key ):
- if folder.folders:
- # Insert the folder into the list.
- repository_dependencies_folder.folders.append( folder )
+ sub_folder.repository_dependencies.append( repository_dependency )
return repository_dependencies_folder, folder_id, repository_dependency_id
-def is_or_should_be_folder( folder_keys, toolshed, repository_name, repository_owner, changeset_revision ):
- key = '%s%s%s%s%s%s%s' % ( toolshed, STRSEP, repository_name, STRSEP, repository_owner, STRSEP, changeset_revision )
- return key in folder_keys
+def is_subfolder_of( folder, repository_dependency ):
+ toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
+ key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision )
+ for sub_folder in folder.folders:
+ if key == sub_folder.key:
+ return True
+ return False
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
+def populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
+ folder_keys = repository_dependencies.keys()
+ for key, value in repository_dependencies.items():
+ repository_dependencies_folder, folder_id, repository_dependency_id = \
+ handle_repository_dependencies_container_entry( repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
+ return repository_dependencies_folder, folder_id, repository_dependency_id
def print_folders( pad, folder ):
# For debugging...
pad_str = ''
for i in range( 1, pad ):
pad_str += ' '
- print '%s%s' % ( pad_str, folder.key )
+ print '%sid: %s key: %s' % ( pad_str, str( folder.id ), folder.key )
+ for repository_dependency in folder.repository_dependencies:
+ print ' %s%s' % ( pad_str, repository_dependency.listify )
for sub_folder in folder.folders:
print_folders( pad+5, sub_folder )
\ No newline at end of file
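The folder keys used throughout container_util are flat strings: the toolshed base URL, repository name, owner, and changeset revision joined with the STRSEP delimiter, which is what lets get_components_from_key() split them back apart. A round-trip sketch (STRSEP's real value is whatever container_util defines; a placeholder is assumed here):

STRSEP = '__STRSEP__'  # assumption: placeholder for container_util's real delimiter

def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision ):
    return '%s%s%s%s%s%s%s' % ( toolshed_base_url, STRSEP, repository_name, STRSEP,
                                repository_owner, STRSEP, changeset_revision )

def get_components_from_key( key ):
    items = key.split( STRSEP )
    return items[ 0 ], items[ 1 ], items[ 2 ], items[ 3 ]

key = generate_repository_dependencies_key_for_repository( 'http://toolshed.g2.bx.psu.edu', 'emboss', 'devteam', 'abc123' )
assert get_components_from_key( key ) == ( 'http://toolshed.g2.bx.psu.edu', 'emboss', 'devteam', 'abc123' )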
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Fixes to upload functional tests (test_get_data.py); TwillTestCase: add is_history_empty which checks the length of HDA JSON instead of relying on html, add check_hda_json_key_value which checks a specific HDA's JSON for a specific value
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7b91f9d888d3/
changeset: 7b91f9d888d3
user: carlfeberhard
date: 2012-12-11 23:29:35
summary: Fixes to upload functional tests (test_get_data.py); TwillTestCase: add is_history_empty which checks the length of HDA JSON instead of relying on html, add check_hda_json_key_value which checks a specific HDA's JSON for a specific value
affected #: 2 files
diff -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -2,6 +2,7 @@
pkg_resources.require( "twill==0.9" )
import StringIO, os, filecmp, time, unittest, urllib, logging, difflib, tarfile, zipfile, tempfile, re, shutil, subprocess
+import pprint
import twill
import twill.commands as tc
@@ -299,10 +300,11 @@
# twill stores the regex match in a special stack variable
match = twill.namespaces.get_twill_glocals()[1][ '__match__' ]
json_data = from_json_string( match )
- assert check_fn( json_data ), 'failed check_fn'
+ assert check_fn( json_data ), 'failed check_fn: %s' %( check_fn.func_name )
except Exception, exc:
log.error( exc, exc_info=True )
+ log.debug( 'json_data: %s', ( '\n' + pprint.pformat( json_data ) if json_data else '(no match)' ) )
fname = self.write_temp_file( tc.browser.get_html() )
errmsg = ( "json '%s' could not be found or failed check_fn" % ( pattern ) +
"\npage content written to '%s'" % ( fname ) )
@@ -310,6 +312,49 @@
self.home()
+ def is_history_empty( self ):
+ """
+ Uses history page JSON to determine whether this history is empty
+ (i.e. has no undeleted datasets).
+ """
+ def has_no_undeleted_hdas( hda_list ):
+ if not len( hda_list ):
+ return True
+ for hda in hda_list:
+ if not( hda[ 'deleted' ] or hda[ 'purged' ] ):
+ return False
+ return True
+ try:
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', has_no_undeleted_hdas )
+ except AssertionError, exc:
+ log.error( 'history is not empty' )
+ raise exc
+
+ def check_hda_json_for_key_value( self, hda_id, key, value, use_string_contains=False ):
+ """
+ Uses history page JSON to determine whether the current history:
+ (1) has an hda with hda_id,
+ (2) that hda has a JSON var named 'key',
+ (3) that var 'key' == value
+ If use_string_contains=True, this will search for value in var 'key'
+ instead of testing for an entire, exact match (string only).
+ """
+ #TODO: multi key, value
+ def hda_has_key_value( hda_list ):
+ for hda in hda_list:
+ # if we found the hda and there's a var in the json named key
+ if( ( hda[ 'id' ] == hda_id )
+ and ( key in hda ) ):
+ var = hda[ key ]
+ # test for partial string containment if str and requested
+ if( ( type( var ) == str )
+ and ( use_string_contains ) ):
+ return ( value in var )
+ # otherwise, test for equivalence
+ return ( var == value )
+ return False
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', hda_has_key_value )
+
def clear_history( self ):
"""Empties a history of all datasets"""
self.visit_page( "clear_history" )
diff -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d -r 7b91f9d888d33ce51e07661c77958d2a3d8d68f2 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -5,131 +5,210 @@
from base.test_db_util import *
class UploadData( TwillTestCase ):
+
def test_0000_setup_upload_tests( self ):
- """Configuring upload tests, setting admin_user"""
+ """
+ Configuring upload tests, setting admin_user
+ """
self.logout()
self.login( email='test@bx.psu.edu' )
global admin_user
admin_user = get_user( email='test@bx.psu.edu' )
+
+ def create_fresh_history( self, user ):
+ """
+ Deletes latest history for the given user, checks for an empty history,
+ and returns that new, empty history
+ """
+ # in order to remove a lot of boilerplate - and to avoid cascading errors
+ history = get_latest_history_for_user( user )
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ self.is_history_empty()
+ return get_latest_history_for_user( user )
+
def test_0005_upload_file( self ):
- """Test uploading 1.bed, NOT setting the file format"""
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bed, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0006_upload_file( self ):
- """Test uploading 1.bed.spaces, with space to tab selected, NOT setting the file format"""
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bed.spaces, with space to tab selected, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bed.spaces', space_to_tab = True )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bed', hid=str( hda.hid ) )
self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0010_upload_file( self ):
- """Test uploading 4.bed.gz, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.gz, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.gz', dbkey='hg17', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0012_upload_file( self ):
- """Test uploading 4.bed.bz2, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.bz2, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.bz2', dbkey='hg17', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
+
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0015_upload_file( self ):
- """Test uploading 1.scf, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.scf, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.scf', ftype='scf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.scf', hid=str( hda.hid ) )
- self.check_history_for_string( "Binary scf sequence file</pre>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "Binary scf sequence file", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0020_upload_file( self ):
- """Test uploading 1.scf, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.scf, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.scf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "File Format' to 'Scf' when uploading scf files" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "File Format' to 'Scf' when uploading scf files", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0025_upload_file( self ):
- """Test uploading 4.bed.zip, manually setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.zip, manually setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.zip', ftype='bed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0030_upload_file( self ):
- """Test uploading 4.bed.zip, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 4.bed.zip, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '4.bed.zip' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '4.bed', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.Chrom</th><th>2.Start</th><th>3.End</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0035_upload_file( self ):
- """Test uploading 1.sam NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.sam, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.sam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.sam', hid=str( hda.hid ) )
- self.check_history_for_string( "<th>1.QNAME</th><th>2.FLAG</th><th>3.RNAME</th><th>4.POS</th>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "peek", "<th>1.QNAME</th><th>2.FLAG</th><th>3.RNAME</th><th>4.POS</th>", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0040_upload_file( self ):
- """Test uploading 1.sff, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.sff, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.sff' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.sff', hid=str( hda.hid ) )
- self.check_history_for_string( 'format: <span class="sff">sff' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "sff", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0045_upload_file( self ):
- """Test uploading 454Score.pdf, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 454Score.pdf, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '454Score.pdf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "1: 454Score.pdf</span>" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "name", "454Score.pdf" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0050_upload_file( self ):
- """Test uploading 454Score.png, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 454Score.png, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '454Score.png' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( "454Score.png" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "name", "454Score.png" )
+
+ self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0055_upload_file( self ):
- """Test uploading lped composite datatype file, manually setting the file format"""
+ """
+ Test uploading lped composite datatype file, manually setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# lped data types include a ped_file and a map_file ( which is binary )
self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped' }, { 'name':'map_file', 'value':'tinywga.map'} ] )
# Get the latest hid for testing
@@ -138,13 +217,18 @@
# We'll test against the resulting ped file and map file for correctness
self.verify_composite_datatype_file_content( 'tinywga.ped', str( hda.id ), base_name = 'rgenetics.ped' )
self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0056_upload_file( self ):
- """Test uploading lped composite datatype file, manually setting the file format, and using space to tab on one file (tinywga.ped)"""
+ """
+ Test uploading lped composite datatype file, manually setting the file format, and using space to tab on one file (tinywga.ped)
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# lped data types include a ped_file and a map_file ( which is binary )
self.upload_file( None, ftype='lped', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'ped_file', 'value':'tinywga.ped', 'space_to_tab':True }, { 'name':'map_file', 'value':'tinywga.map'} ] )
# Get the latest hid for testing
@@ -153,15 +237,25 @@
# We'll test against the resulting ped file and map file for correctness
self.verify_composite_datatype_file_content( 'tinywga.ped.space_to_tab', str( hda.id ), base_name = 'rgenetics.ped' )
self.verify_composite_datatype_file_content( 'tinywga.map', str( hda.id ), base_name = 'rgenetics.map' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0060_upload_file( self ):
- """Test uploading pbed composite datatype file, manually setting the file format"""
+ """
+ Test uploading pbed composite datatype file, manually setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
# pbed data types include a bim_file, a bed_file and a fam_file
- self.upload_file( None, ftype='pbed', metadata = [ { 'name':'base_name', 'value':'rgenetics' } ], composite_data = [ { 'name':'bim_file', 'value':'tinywga.bim' }, { 'name':'bed_file', 'value':'tinywga.bed'}, { 'name':'fam_file', 'value':'tinywga.fam' } ] )
+ self.upload_file( None, ftype='pbed',
+ metadata = [ { 'name':'base_name', 'value':'rgenetics' } ],
+ composite_data = [
+ { 'name':'bim_file', 'value':'tinywga.bim' },
+ { 'name':'bed_file', 'value':'tinywga.bed' },
+ { 'name':'fam_file', 'value':'tinywga.fam' } ])
# Get the latest hid for testing
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
@@ -169,280 +263,373 @@
self.verify_composite_datatype_file_content( 'tinywga.bim', str( hda.id ), base_name = 'rgenetics.bim' )
self.verify_composite_datatype_file_content( 'tinywga.bed', str( hda.id ), base_name = 'rgenetics.bed' )
self.verify_composite_datatype_file_content( 'tinywga.fam', str( hda.id ), base_name = 'rgenetics.fam' )
- self.check_history_for_string( "rgenetics" )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "metadata_base_name", "rgenetics", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0065_upload_file( self ):
- """Test uploading asian_chars_1.txt, NOT setting the file format"""
+ """
+ Test uploading asian_chars_1.txt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'asian_chars_1.txt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'asian_chars_1.txt', hid=str( hda.hid ) )
- self.check_history_for_string( 'uploaded multi-byte char file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "uploaded multi-byte char file", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0070_upload_file( self ):
- """Test uploading 2gen.fastq, NOT setting the file format"""
+ """
+ Test uploading 2gen.fastq, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '2gen.fastq' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '2gen.fastq', hid=str( hda.hid ) )
- self.check_history_for_string( '2gen.fastq format: <span class="fastq">fastq</span>, database: \? Info: uploaded fastq file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "fastq" )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0075_upload_file( self ):
- """Test uploading 1.wig, NOT setting the file format"""
+ """
+ Test uploading 1.wig, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.wig' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.wig', hid=str( hda.hid ) )
- self.check_history_for_string( '1.wig format: <span class="wig">wig</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "wig" )
self.check_metadata_for_string( 'value="1.wig" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="wig" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0080_upload_file( self ):
- """Test uploading 1.tabular, NOT setting the file format"""
+ """
+ Test uploading 1.tabular, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.tabular' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.tabular', hid=str( hda.hid ) )
- self.check_history_for_string( '1.tabular format: <span class="tabular">tabular</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "tabular" )
self.check_metadata_for_string( 'value="1.tabular" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="tabular" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0085_upload_file( self ):
- """Test uploading qualscores.qualsolid, NOT setting the file format"""
+ """
+ Test uploading qualscores.qualsolid, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'qualscores.qualsolid' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'qualscores.qualsolid', hid=str( hda.hid ) )
- self.check_history_for_string( '48 lines format: <span class="qualsolid">qualsolid</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "qualsolid" )
self.check_metadata_for_string( 'Change data type value="qualsolid" selected="yes">qualsolid' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0090_upload_file( self ):
- """Test uploading qualscores.qual454, NOT setting the file format"""
+ """
+ Test uploading qualscores.qual454, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'qualscores.qual454' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'qualscores.qual454', hid=str( hda.hid ) )
- self.check_history_for_string( '49 lines format: <span class="qual454">qual454</span>, database: \?' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "qual454" )
self.check_metadata_for_string( 'Change data type value="qual454" selected="yes">qual454' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0095_upload_file( self ):
- """Test uploading 3.maf, NOT setting the file format"""
+ """
+ Test uploading 3.maf, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '3.maf' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '3.maf', hid=str( hda.hid ) )
- self.check_history_for_string( '3.maf format: <span class="maf">maf</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "maf" )
self.check_metadata_for_string( 'value="3.maf" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="interval">Convert MAF to Genomic Intervals <option value="fasta">Convert MAF to Fasta' )
self.check_metadata_for_string( 'Change data type selected value="maf" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0100_upload_file( self ):
- """Test uploading 1.lav, NOT setting the file format"""
+ """
+ Test uploading 1.lav, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.lav' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.lav', hid=str( hda.hid ) )
- self.check_history_for_string( '1.lav format: <span class="lav">lav</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "lav" )
self.check_metadata_for_string( 'value="1.lav" value="\?"' )
self.check_metadata_for_string( 'Change data type selected value="lav" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0105_upload_file( self ):
- """Test uploading 1.interval, NOT setting the file format"""
+ """
+ Test uploading 1.interval, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.interval' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.interval', hid=str( hda.hid ) )
- self.check_history_for_string( '1.interval format: <span class="interval">interval</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "interval" )
self.check_metadata_for_string( 'value="1.interval" value="\?"' )
self.check_metadata_for_string( 'Chrom column: <option value="1" selected> Start column: <option value="2" selected>' )
self.check_metadata_for_string( 'End column: <option value="3" selected> Strand column <option value="6" selected>' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert Genomic Intervals To BED' )
self.check_metadata_for_string( 'Change data type selected value="interval" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0110_upload_file( self ):
- """Test uploading 5.gff3, NOT setting the file format"""
+ """
+ Test uploading 5.gff3, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '5.gff3' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '5.gff3', hid=str( hda.hid ) )
- self.check_history_for_string( '5.gff3 format: <span class="gff3">gff3</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "gff3" )
self.check_metadata_for_string( 'value="5.gff3" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff3" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0115_upload_file( self ):
- """Test uploading html_file.txt, NOT setting the file format"""
+ """
+ Test uploading html_file.txt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'html_file.txt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
- self.check_history_for_string( 'The uploaded file contains inappropriate content' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ),
+ "misc_info", "The uploaded file contains inappropriate HTML content", use_string_contains=True )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0120_upload_file( self ):
- """Test uploading 5.gff, NOT setting the file format"""
- # Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 5.gff, NOT setting the file format
+
+ Test sniffer for gff.
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '5.gff' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '5.gff', hid=str( hda.hid ) )
- self.check_history_for_string( '5.gff format: <span class="gff">gff</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "gff" )
self.check_metadata_for_string( 'value="5.gff" value="\?"' )
self.check_metadata_for_string( 'Convert to new format <option value="bed">Convert GFF to BED' )
self.check_metadata_for_string( 'Change data type selected value="gff" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0125_upload_file( self ):
- """Test uploading 1.fasta, NOT setting the file format"""
+ """
+ Test uploading 1.fasta, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.fasta' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.fasta', hid=str( hda.hid ) )
- self.check_history_for_string( '1.fasta format: <span class="fasta">fasta</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "fasta" )
self.check_metadata_for_string( 'value="1.fasta" value="\?" Change data type selected value="fasta" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0130_upload_file( self ):
- """Test uploading 1.customtrack, NOT setting the file format"""
+ """
+ Test uploading 1.customtrack, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.customtrack' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.customtrack', hid=str( hda.hid ) )
- self.check_history_for_string( '1.customtrack format: <span class="customtrack">customtrack</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "customtrack" )
self.check_metadata_for_string( 'value="1.customtrack" value="\?" Change data type selected value="customtrack" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0135_upload_file( self ):
- """Test uploading shrimp_cs_test1.csfasta, NOT setting the file format"""
+ """
+ Test uploading shrimp_cs_test1.csfasta, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( 'shrimp_cs_test1.csfasta' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( 'shrimp_cs_test1.csfasta', hid=str( hda.hid ) )
- self.check_history_for_string( '2,500 sequences format: <span class="csfasta">csfasta</span>, <td>>2_14_26_F3,-1282216.0</td>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "csfasta" )
self.check_metadata_for_string( 'value="shrimp_cs_test1.csfasta" value="\?" Change data type value="csfasta" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0145_upload_file( self ):
- """Test uploading 1.axt, NOT setting the file format"""
+ """
+ Test uploading 1.axt, NOT setting the file format
+ """
# Logged in as admin_user
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.axt' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.axt', hid=str( hda.hid ) )
- self.check_history_for_string( '1.axt format: <span class="axt">axt</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "axt" )
self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0150_upload_file( self ):
- """Test uploading 1.bam, which is a sorted Bam file creaed by the Galaxy sam_to_bam tool, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bam, which is a sorted Bam file created by the Galaxy sam_to_bam tool, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bam', hid=str( hda.hid ), attributes={ 'ftype' : 'bam' } )
- self.check_history_for_string( '<span class="bam">bam</span>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bam" )
# Make sure the Bam index was created
assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 1.bam"
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0155_upload_file( self ):
- """Test uploading 3unsorted.bam, which is an unsorted Bam file, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 3unsorted.bam, which is an unsorted Bam file, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '3unsorted.bam' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
# Since 3unsorted.bam is not sorted, we cannot verify dataset correctness since the uploaded
# dataset will be sorted. However, the check below to see if the index was created is
# sufficient.
- self.check_history_for_string( '<span class="bam">bam</span>' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bam" )
# Make sure the Bam index was created
assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 3unsorted.bam"
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0160_url_paste( self ):
- """Test url paste behavior"""
+ """
+ Test url paste behavior
+ """
# Logged in as admin_user
- # Deleting the current history should have created a new history
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ history = self.create_fresh_history( admin_user )
+
self.upload_url_paste( 'hello world' )
- self.check_history_for_string( 'Pasted Entry' )
- self.check_history_for_string( 'hello world' )
+ self.check_history_for_exact_string( 'Pasted Entry' )
+ self.check_history_for_exact_string( 'hello world' )
self.upload_url_paste( u'hello world' )
- self.check_history_for_string( 'Pasted Entry' )
- self.check_history_for_string( 'hello world' )
+ self.check_history_for_exact_string( 'Pasted Entry' )
+ self.check_history_for_exact_string( 'hello world' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0165_upload_file( self ):
- """Test uploading 1.pileup, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.pileup, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.pileup' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.pileup', hid=str( hda.hid ) )
- self.check_history_for_string( '1.pileup format: <span class="pileup">pileup</span>, database: \? Info: uploaded file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "pileup" )
self.check_metadata_for_string( 'value="1.pileup" value="\?" Change data type selected value="pileup" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0170_upload_file( self ):
- """Test uploading 1.bigbed, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bigbed, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bigbed' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bigbed', hid=str( hda.hid ) )
- self.check_history_for_string( '1.bigbed</span> database: \? Info: uploaded bigbed file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bigbed" )
self.check_metadata_for_string( 'value="1.bigbed" value="\?" Change data type selected value="bigbed" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_0175_upload_file( self ):
- """Test uploading 1.bigwig, NOT setting the file format"""
- self.check_history_for_string( 'Your history is empty' )
- history = get_latest_history_for_user( admin_user )
+ """
+ Test uploading 1.bigwig, NOT setting the file format
+ """
+ history = self.create_fresh_history( admin_user )
+
self.upload_file( '1.bigwig' )
hda = get_latest_hda()
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bigwig', hid=str( hda.hid ) )
- self.check_history_for_string( '1.bigwig</span> database: \? Info: uploaded bigwig file' )
+ self.check_hda_json_for_key_value( self.security.encode_id( hda.id ), "data_type", "bigwig" )
self.check_metadata_for_string( 'value="1.bigwig" value="\?" Change data type selected value="bigwig" selected="yes"' )
+
self.delete_history( id=self.security.encode_id( history.id ) )
+
def test_9999_clean_up( self ):
self.logout()
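Throughout this changeset the upload tests stop scraping the rendered history page via check_history_for_string and instead assert on a single key of the dataset's JSON representation with check_hda_json_for_key_value, which is far less brittle than matching rendered HTML. The helper's implementation is not part of this diff; a minimal sketch of the idea, where the API route and the current_history_id() helper are assumptions for illustration and not shown in this changeset:

    from galaxy.util.json import from_json_string

    def check_hda_json_for_key_value( self, hda_id, key, value, use_string_contains=False ):
        # Sketch only: fetch the HDA's JSON and compare one key.
        # The endpoint route and current_history_id() are assumptions,
        # not part of this changeset.
        self.visit_url( '/api/histories/%s/contents/%s' % ( self.current_history_id(), hda_id ) )
        hda_dict = from_json_string( self.last_page() )
        actual = str( hda_dict.get( key ) )
        if use_string_contains:
            assert value in actual, "Expected %s to contain %s, got %s" % ( key, value, actual )
        else:
            assert value == actual, "Expected %s == %s, got %s" % ( key, value, actual )

Checking "peek", "misc_info", "name" or "data_type" this way keeps the assertions stable even when the history panel markup changes.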
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster: when creating a visualization, default the view to the first chromosome.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d9e2418fb00a/
changeset: d9e2418fb00a
user: jgoecks
date: 2012-12-11 22:47:54
summary: Trackster: when creating a visualization, default the view to the first chromosome.
affected #: 2 files
diff -r 376a3714dc1510d3a1405575fc2667b68a28a695 -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1190,7 +1190,7 @@
view.chrom_select.html(chrom_options);
view.chrom_start_index = result.start_index;
- chrom_data.resolve(result);
+ chrom_data.resolve(result.chrom_info);
},
error: function() {
alert("Could not load chroms for this dbkey:", view.dbkey);
diff -r 376a3714dc1510d3a1405575fc2667b68a28a695 -r d9e2418fb00aefd0a8f65a4686dae4e8cd6cd16d static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -188,7 +188,7 @@
var self = this,
view = new tracks.TracksterView(view_config);
view.editor = true;
- $.when( view.load_chroms_deferred ).then(function() {
+ $.when( view.load_chroms_deferred ).then(function(chrom_info) {
// Viewport config.
if (viewport_config) {
var
@@ -201,6 +201,10 @@
view.change_chrom(chrom, start, end);
}
}
+ else {
+ // No viewport, so use first chromosome.
+ view.change_chrom(chrom_info[0].chrom);
+ }
// Add drawables to view.
if (drawables_config) {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Functional tests for n levels of circular repository dependencies.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/376a3714dc15/
changeset: 376a3714dc15
user: inithello
date: 2012-12-11 20:29:28
summary: Functional tests for n levels of circular repository dependencies.
affected #: 7 files
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/common.py
--- a/test/tool_shed/base/common.py
+++ b/test/tool_shed/base/common.py
@@ -10,6 +10,8 @@
new_repository_dependencies_xml = '''<?xml version="1.0"?><repositories${description}>
- <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />
+${dependency_lines}
</repositories>
'''
+
+new_repository_dependencies_line = ''' <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''
\ No newline at end of file
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -22,12 +22,6 @@
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.shed_tools_dict = {}
- self.keepOutdir = os.environ.get( 'TOOL_SHED_TEST_SAVE', '' )
- if self.keepOutdir > '':
- try:
- os.makedirs( self.keepOutdir )
- except:
- pass
self.home()
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
@@ -50,9 +44,11 @@
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision ):
+ def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision, changeset_revision=None ):
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username, depends_on_changeset_revision ]
- self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ self.display_manage_repository_page( repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed )
def check_repository_metadata( self, repository, tip_only=True ):
if tip_only:
assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -216,21 +212,24 @@
else:
string = string.replace( character, replacement )
return string
- def generate_repository_dependency_xml( self, repository, xml_filename, dependency_description='' ):
+ def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
os.makedirs( file_path )
- changeset_revision = self.get_repository_tip( repository )
+ dependency_entries = []
+ for repository in repositories:
+ changeset_revision = self.get_repository_tip( repository )
+ template = string.Template( common.new_repository_dependencies_line )
+ dependency_entries.append( template.safe_substitute( toolshed_url=self.url,
+ owner=repository.user.username,
+ repository_name=repository.name,
+ changeset_revision=changeset_revision ) )
if dependency_description:
description = ' description="%s"' % dependency_description
else:
description = dependency_description
template_parser = string.Template( common.new_repository_dependencies_xml )
- repository_dependency_xml = template_parser.safe_substitute( toolshed_url=self.url,
- owner=repository.user.username,
- repository_name=repository.name,
- changeset_revision=changeset_revision,
- description=description )
+ repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
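With this change generate_repository_dependency_xml accepts a list of repositories and fills the ${dependency_lines} slot of the template with one <repository/> line per dependency, instead of hard-coding a single entry. A small stand-alone illustration of the two-stage string.Template substitution; the repository names, owner, URL and changeset revisions below are made-up sample values:

    import string

    new_repository_dependencies_xml = '''<?xml version="1.0"?><repositories${description}>
    ${dependency_lines}
    </repositories>
    '''
    new_repository_dependencies_line = '''    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''

    # Sample values, made up for illustration.
    dependency_entries = []
    for name, revision in [ ( 'emboss_datatypes_0050', '0123456789ab' ), ( 'emboss_0050', 'ba9876543210' ) ]:
        template = string.Template( new_repository_dependencies_line )
        dependency_entries.append( template.safe_substitute( toolshed_url='http://localhost:9009',
                                                             owner='user1',
                                                             repository_name=name,
                                                             changeset_revision=revision ) )
    template_parser = string.Template( new_repository_dependencies_xml )
    print template_parser.safe_substitute( description=' description="Test dependencies"',
                                           dependency_lines='\n'.join( dependency_entries ) )

Substituting the per-repository lines first and the outer document second is what lets one template serve both single and multi-repository dependency files.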
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -55,7 +55,7 @@
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
- self.generate_repository_dependency_xml( datatypes_repository,
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
self.upload_file( repository,
'repository_dependencies.xml',
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -73,7 +73,7 @@
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
- self.generate_repository_dependency_xml( datatypes_repository,
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository,
@@ -93,7 +93,7 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
- self.generate_repository_dependency_xml( emboss_5_repository,
+ self.generate_repository_dependency_xml( [ emboss_5_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 5 repository.' )
self.upload_file( emboss_repository,
@@ -105,7 +105,7 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
- self.generate_repository_dependency_xml( emboss_6_repository,
+ self.generate_repository_dependency_xml( [ emboss_6_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 6 repository.' )
self.upload_file( emboss_repository,
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -63,7 +63,7 @@
repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
- self.generate_repository_dependency_xml( repository,
+ self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
self.upload_file( filtering_repository,
@@ -79,7 +79,7 @@
repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
- self.generate_repository_dependency_xml( repository,
+ self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Freebayes depends on the filtering repository.' )
self.upload_file( freebayes_repository,
@@ -95,8 +95,5 @@
# Freebayes revision 0 -> filtering revision 1.
# Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
# In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
- filtering_strings_displayed = [ freebayes_repository_name, common.test_user_1_name, self.get_repository_tip( freebayes_repository ) ]
- freebayes_strings_displayed = [ filtering_repository_name, common.test_user_1_name, self.get_repository_tip( filtering_repository ) ]
- self.display_manage_repository_page( filtering_repository, strings_displayed=filtering_strings_displayed )
- self.display_manage_repository_page( freebayes_repository, strings_displayed=freebayes_strings_displayed )
-
+ self.check_repository_dependency( filtering_repository, freebayes_repository, self.get_repository_tip( freebayes_repository ) )
+ self.check_repository_dependency( freebayes_repository, filtering_repository, self.get_repository_tip( filtering_repository ) )
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0050_circular_n_levels.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -0,0 +1,129 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0050'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+default_category = 'test_0050_repository_n_level_circular_dependencies'
+default_category_description = 'Testing handling of circular repository dependencies to n levels.'
+
+class TestRepositoryCircularDependenciesToNLevels( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies down to n levels.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( default_category, default_category_description )
+ def test_0010_create_emboss_datatypes_repository( self ):
+ '''Create and populate emboss_datatypes_0050.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( emboss_datatypes_repository_name,
+ emboss_datatypes_repository_description,
+ repository_long_description=emboss_datatypes_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'emboss/datatypes/datatypes_conf.xml',
+ strings_displayed=[],
+ commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_create_emboss_repository( self ):
+ '''Create and populate emboss_0050.'''
+ self.create_repository( emboss_repository_name,
+ emboss_repository_description,
+ repository_long_description=emboss_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'emboss/emboss.tar',
+ strings_displayed=[],
+ commit_message='Uploaded tool tarball.' )
+ datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'emboss' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Emboss depends on the emboss_datatypes repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss_datatypes.' )
+ def test_0020_create_filtering_repository( self ):
+ '''Create and populate filtering_0050.'''
+ self.create_repository( filtering_repository_name,
+ filtering_repository_description,
+ repository_long_description=filtering_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( [ emboss_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering depends on the emboss repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss.' )
+ def test_0025_create_freebayes_repository( self ):
+ '''Create and populate freebayes_0050.'''
+ self.create_repository( freebayes_repository_name,
+ freebayes_repository_description,
+ repository_long_description=freebayes_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+ previous_tip = self.get_repository_tip( repository )
+ self.generate_repository_dependency_xml( [ emboss_datatypes_repository, emboss_repository, filtering_repository, repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Freebayes depends on the filtering repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering.' )
+ self.display_manage_repository_page( repository, strings_not_displayed=[ previous_tip ] )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that the generated dependency circle does not cause an infinite loop.'''
+ emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
+ self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
+ self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
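The point of test_0030_verify_repository_dependencies is that freebayes_0050's dependency file lists freebayes_0050 itself, closing the circle, and the tool shed must still terminate when it walks the graph. Galaxy's actual traversal code is not shown in this changeset; a generic sketch of why a visited set guarantees termination, using the repository names from this test:

    def walk_dependencies( repository, get_dependencies, visited=None ):
        # Generic cycle-safe traversal with a visited set. This is an
        # illustration of why the circle cannot recurse forever, not
        # Galaxy's actual dependency-walking code.
        if visited is None:
            visited = set()
        if repository in visited:
            return visited
        visited.add( repository )
        for dependency in get_dependencies( repository ):
            walk_dependencies( dependency, get_dependencies, visited )
        return visited

    # The circle defined by this test: freebayes_0050 lists itself among
    # its own dependencies, and filtering/emboss chain back to datatypes.
    dependency_graph = { 'freebayes_0050': [ 'emboss_datatypes_0050', 'emboss_0050', 'filtering_0050', 'freebayes_0050' ],
                         'filtering_0050': [ 'emboss_0050' ],
                         'emboss_0050': [ 'emboss_datatypes_0050' ],
                         'emboss_datatypes_0050': [] }
    print walk_dependencies( 'freebayes_0050', lambda name: dependency_graph[ name ] )

Each repository is expanded at most once, so the self-reference and any deeper cycles are visited and then skipped rather than looping.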
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -67,9 +67,7 @@
# ---- Configuration ------------------------------------------------------
tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
- tool_shed_test_save = os.environ.get( 'TOOL_SHED_TEST_SAVE', None )
tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
- start_server = 'TOOL_SHED_TEST_EXTERNAL' not in os.environ
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
tool_shed_test_file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', default_tool_shed_test_file_dir )
@@ -84,57 +82,27 @@
tool_dependency_dir = os.environ.get( 'TOOL_SHED_TOOL_DEPENDENCY_DIR', None )
use_distributed_object_store = os.environ.get( 'TOOL_SHED_USE_DISTRIBUTED_OBJECT_STORE', False )
- if start_server:
- if not os.path.isdir( tool_shed_test_tmp_dir ):
- os.mkdir( tool_shed_test_tmp_dir )
- psu_production = False
- tool_shed_test_proxy_port = None
- if 'TOOL_SHED_TEST_PSU_PRODUCTION' in os.environ:
- if not tool_shed_test_port:
- raise Exception( 'Set TOOL_SHED_TEST_PORT to the port to which the proxy server will proxy' )
- tool_shed_test_proxy_port = os.environ.get( 'TOOL_SHED_TEST_PROXY_PORT', None )
- if not tool_shed_test_proxy_port:
- raise Exception( 'Set TOOL_SHED_TEST_PROXY_PORT to the port on which the proxy server is listening' )
- base_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_FILE_PATH', None )
- if not base_file_path:
- raise Exception( 'Set TOOL_SHED_TEST_BASE_FILE_PATH to the directory which will contain the dataset files directory' )
- base_new_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_NEW_FILE_PATH', None )
- if not base_new_file_path:
- raise Exception( 'Set TOOL_SHED_TEST_BASE_NEW_FILE_PATH to the directory which will contain the temporary directory' )
- database_connection = os.environ.get( 'TOOL_SHED_TEST_DBURI', None )
- if not database_connection:
- raise Exception( 'Set TOOL_SHED_TEST_DBURI to the URI of the database to be used for tests' )
- nginx_upload_store = os.environ.get( 'TOOL_SHED_TEST_NGINX_UPLOAD_STORE', None )
- if not nginx_upload_store:
- raise Exception( 'Set TOOL_SHED_TEST_NGINX_UPLOAD_STORE to the path where the nginx upload module places uploaded files' )
- file_path = tempfile.mkdtemp( dir=base_file_path )
- new_repos_path = tempfile.mkdtemp( dir=base_new_file_path )
- hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- kwargs = dict( database_engine_option_pool_size = '10',
- database_engine_option_max_overflow = '20',
- database_engine_option_strategy = 'threadlocal',
- static_enabled = 'False',
- debug = 'False' )
- psu_production = True
- else:
- if 'TOOL_SHED_TEST_DBPATH' in os.environ:
- db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
- else:
- tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- db_path = os.path.join( tempdir, 'database' )
- file_path = os.path.join( db_path, 'files' )
- hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- if 'TOOL_SHED_TEST_DBURI' in os.environ:
- database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
- else:
- database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
- kwargs = {}
- for dir in [ tool_shed_test_tmp_dir ]:
- try:
- os.makedirs( dir )
- except OSError:
- pass
+ if not os.path.isdir( tool_shed_test_tmp_dir ):
+ os.mkdir( tool_shed_test_tmp_dir )
+ tool_shed_test_proxy_port = None
+ if 'TOOL_SHED_TEST_DBPATH' in os.environ:
+ db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ db_path = os.path.join( tempdir, 'database' )
+ file_path = os.path.join( db_path, 'files' )
+ hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ if 'TOOL_SHED_TEST_DBURI' in os.environ:
+ database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+ else:
+ database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+ kwargs = {}
+ for dir in [ tool_shed_test_tmp_dir ]:
+ try:
+ os.makedirs( dir )
+ except OSError:
+ pass
print "Database connection:", database_connection
@@ -145,89 +113,78 @@
# ---- Build Application --------------------------------------------------
app = None
- if start_server:
- global_conf = { '__file__' : 'community_wsgi.ini.sample' }
- if psu_production:
- global_conf = None
- if not database_connection.startswith( 'sqlite://' ):
- kwargs[ 'database_engine_option_max_overflow' ] = '20'
- if tool_dependency_dir is not None:
- kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
- if use_distributed_object_store:
- kwargs[ 'object_store' ] = 'distributed'
- kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
+ global_conf = { '__file__' : 'community_wsgi.ini.sample' }
+ if not database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ if tool_dependency_dir is not None:
+ kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
+ if use_distributed_object_store:
+ kwargs[ 'object_store' ] = 'distributed'
+ kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
- app = UniverseApplication( job_queue_workers = 5,
- id_secret = 'changethisinproductiontoo',
- template_path = 'templates',
- database_connection = database_connection,
- database_engine_option_pool_size = '10',
- file_path = file_path,
- new_file_path = new_repos_path,
- tool_path=tool_path,
- datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
- tool_parse_help = False,
- tool_data_table_config_path = tool_data_table_config_path,
- shed_tool_data_table_config = shed_tool_data_table_config,
- log_destination = "stdout",
- use_heartbeat = False,
- allow_user_creation = True,
- allow_user_deletion = True,
- admin_users = 'test(a)bx.psu.edu',
- global_conf = global_conf,
- running_functional_tests = True,
- hgweb_config_dir = hgweb_config_dir,
- **kwargs )
+ app = UniverseApplication( job_queue_workers = 5,
+ id_secret = 'changethisinproductiontoo',
+ template_path = 'templates',
+ database_connection = database_connection,
+ database_engine_option_pool_size = '10',
+ file_path = file_path,
+ new_file_path = new_repos_path,
+ tool_path=tool_path,
+ datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
+ tool_parse_help = False,
+ tool_data_table_config_path = tool_data_table_config_path,
+ shed_tool_data_table_config = shed_tool_data_table_config,
+ log_destination = "stdout",
+ use_heartbeat = False,
+ allow_user_creation = True,
+ allow_user_deletion = True,
+ admin_users = 'test(a)bx.psu.edu',
+ global_conf = global_conf,
+ running_functional_tests = True,
+ hgweb_config_dir = hgweb_config_dir,
+ **kwargs )
- log.info( "Embedded Universe application started" )
+ log.info( "Embedded Universe application started" )
# ---- Run webserver ------------------------------------------------------
server = None
- if start_server:
- webapp = buildapp.app_factory( dict( database_file=database_connection ),
- use_translogger=False,
- static_enabled=False,
- app=app )
- if tool_shed_test_port is not None:
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ webapp = buildapp.app_factory( dict( database_file=database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=app )
+ if tool_shed_test_port is not None:
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
else:
- random.seed()
- for i in range( 0, 9 ):
- try:
- tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
- log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
- break
- except socket.error, e:
- if e[0] == 98:
- continue
- raise
- else:
- raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
- if tool_shed_test_proxy_port:
- os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
- else:
- os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
- t = threading.Thread( target=server.serve_forever )
- t.start()
- # Test if the server is up
- for i in range( 10 ):
- # Directly test the app, not the proxy.
- conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
- conn.request( "GET", "/" )
- if conn.getresponse().status == 200:
- break
- time.sleep( 0.1 )
- else:
- raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
- # Test if the proxy server is up.
- if psu_production:
- # Directly test the app, not the proxy.
- conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_proxy_port )
- conn.request( "GET", "/" )
- if not conn.getresponse().status == 200:
- raise Exception( "Test HTTP proxy server did not return '200 OK'" )
- log.info( "Embedded web server started" )
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ if tool_shed_test_proxy_port:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
+ else:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
+ t = threading.Thread( target=server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ log.info( "Embedded web server started" )
# We don't add the tests to the path until everything is up and running
new_path = [ os.path.join( cwd, 'test' ) ]
new_path.extend( sys.path[1:] )
@@ -239,9 +196,6 @@
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
success = False
try:
- # What requires these? Handy for (eg) functional tests to save outputs?
- if tool_shed_test_save:
- os.environ[ 'TOOL_SHED_TEST_SAVE' ] = tool_shed_test_save
# Pass in through script set env, will leave a copy of ALL test validate files.
os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
if tool_shed_test_file_dir:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
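The interesting part of this changeset is the server-startup logic it makes unconditional: pick a random port in a configured range, retry only when the failure is "address in use" (the hard-coded 98 is errno EADDRINUSE on Linux), then poll the app over plain HTTP until it answers 200 OK. Below is a minimal, self-contained sketch of the same pattern in the Python 2 idiom of the patch; the function names are illustrative, the bind probe is a simplified stand-in for actually starting the server, and errno.EADDRINUSE replaces the literal 98.

import errno, httplib, random, socket, time

def find_free_port( host, port_min=8000, port_max=9000, attempts=10 ):
    # Try random ports in the range, retrying only on EADDRINUSE, like
    # the loop in the tool shed functional test script.  The probe
    # socket is closed again, so this is only a best-effort check.
    for i in range( attempts ):
        port = random.randint( port_min, port_max )
        s = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
        try:
            try:
                s.bind( ( host, port ) )
                return port
            except socket.error, e:
                if e.errno != errno.EADDRINUSE:
                    raise
        finally:
            s.close()
    raise Exception( "Unable to open a port between %s and %s" % ( port_min, port_max ) )

def wait_until_up( host, port, tries=10, delay=0.1 ):
    # Poll the app directly (not a proxy) until it returns 200 OK.
    for i in range( tries ):
        try:
            conn = httplib.HTTPConnection( host, port )
            conn.request( "GET", "/" )
            if conn.getresponse().status == 200:
                return
        except socket.error:
            pass
        time.sleep( delay )
    raise Exception( "Test HTTP server did not return '200 OK' after %d tries" % tries )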
commit/galaxy-central: dan: Add 'checkers' namespace before calls to .check_*.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a7ea7728829d/
changeset: a7ea7728829d
user: dan
date: 2012-12-11 20:14:14
summary: Add 'checkers' namespace before calls to .check_*.
affected #: 1 file
diff -r eed6dd67514b5e5ab0174f181af9514dea7a8d33 -r a7ea7728829dcc5395606bd402805d2ac972d6c6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -846,17 +846,17 @@
if is_column_based( file_path ):
return True
# If the file is any of the following, don't copy it.
- if check_html( file_path ):
+ if checkers.check_html( file_path ):
return False
- if check_image( file_path ):
+ if checkers.check_image( file_path ):
return False
- if check_binary( name=file_path ):
+ if checkers.check_binary( name=file_path ):
return False
- if is_bz2( file_path ):
+ if checkers.is_bz2( file_path ):
return False
- if is_gzip( file_path ):
+ if checkers.is_gzip( file_path ):
return False
- if check_zip( file_path ):
+ if checkers.check_zip( file_path ):
return False
# Default to copying the file if none of the above are true.
return True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
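The gate being touched here decides whether an uploaded file is safe to copy: column-based data passes, while HTML, images, binaries, and compressed archives are rejected. The real helpers live in galaxy.datatypes.checkers; the sketch below reimplements a few of them with stdlib magic-byte checks purely to show the shape of the gate, so these check functions are simplified stand-ins, not the Galaxy implementations.

import zipfile

def is_gzip( path ):
    # Gzip streams start with the two magic bytes 1f 8b.
    with open( path, 'rb' ) as f:
        return f.read( 2 ) == '\x1f\x8b'

def is_bz2( path ):
    # Bzip2 streams start with 'BZh'.
    with open( path, 'rb' ) as f:
        return f.read( 3 ) == 'BZh'

def check_binary( path ):
    # Crude sniff: a NUL byte early in the file is a good binary hint.
    with open( path, 'rb' ) as f:
        return '\x00' in f.read( 1024 )

def ok_to_copy( path ):
    # Same shape as the gate in shed_util.py: reject archives and
    # binary content, default to copying everything else.
    for looks_unsafe in ( is_gzip, is_bz2, zipfile.is_zipfile, check_binary ):
        if looks_unsafe( path ):
            return False
    return True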
commit/galaxy-central: greg: Fix for building tool dependencies container.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/eed6dd67514b/
changeset: eed6dd67514b
user: greg
date: 2012-12-11 17:54:09
summary: Fix for building tool dependencies container.
affected #: 1 file
diff -r 71c3b867efd1d8074a59659ed7320c1e6a3d9ed3 -r eed6dd67514b5e5ab0174f181af9514dea7a8d33 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -99,7 +99,7 @@
repository_dependencies=repository_dependencies )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
if tool_dependencies:
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
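Like the previous changeset, this is fallout from dropping wildcard imports: once a "from some_module import *" line goes away, any helper it silently pulled into the namespace raises NameError until the call is qualified with its module. A tiny stdlib illustration of the same failure mode and fix:

import os.path

# join( 'a', 'b' )               # NameError: 'join' only existed as a bare
                                 # name while a wildcard import was in place
print os.path.join( 'a', 'b' )   # the qualified form this changeset switches to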
commit/galaxy-central: greg: Fix for resetting all metadata on a tool shed repository.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/71c3b867efd1/
changeset: 71c3b867efd1
user: greg
date: 2012-12-11 17:37:53
summary: Fix for resetting all metadata on a tool shed repository.
affected #: 1 file
diff -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a -r 71c3b867efd1d8074a59659ed7320c1e6a3d9ed3 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -726,7 +726,7 @@
relative_path, filename = os.path.split( sample_file )
if filename == 'tool_data_table_conf.xml.sample':
new_table_elems, error_message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=original_tool_data_path,
+ tool_data_path=app.config.tool_data_path,
shed_tool_data_table_config=app.config.shed_tool_data_table_config,
persist=persist )
if error_message:
@@ -771,7 +771,7 @@
if not valid:
invalid_file_tups.append( ( name, error_message ) )
else:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -1329,32 +1329,32 @@
relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
return relative_path_to_file
-def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+def get_sample_files_from_disk( repository_files_dir, tool_path=None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
if resetting_all_metadata_on_repository:
# Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
work_dir = repository_files_dir
sample_file_metadata_paths = []
sample_file_copy_paths = []
for root, dirs, files in os.walk( repository_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- if resetting_all_metadata_on_repository:
- full_path_to_sample_file = os.path.join( root, name )
- stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
- if stripped_path_to_sample_file.startswith( '/' ):
- stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
- relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
- if os.path.exists( relative_path_to_sample_file ):
- sample_file_copy_paths.append( relative_path_to_sample_file )
- else:
- sample_file_copy_paths.append( full_path_to_sample_file )
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name.endswith( '.sample' ):
+ if resetting_all_metadata_on_repository:
+ full_path_to_sample_file = os.path.join( root, name )
+ stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
+ if stripped_path_to_sample_file.startswith( '/' ):
+ stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
+ relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
+ if os.path.exists( relative_path_to_sample_file ):
+ sample_file_copy_paths.append( relative_path_to_sample_file )
else:
- relative_path_to_sample_file = os.path.join( root, name )
- sample_file_copy_paths.append( relative_path_to_sample_file )
- if tool_path and relative_install_dir:
- if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
- relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
+ sample_file_copy_paths.append( full_path_to_sample_file )
+ else:
+ relative_path_to_sample_file = os.path.join( root, name )
+ sample_file_copy_paths.append( relative_path_to_sample_file )
+ if tool_path and relative_install_dir:
+ if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
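Besides the two one-line fixes (reading app.config.tool_data_path instead of a stale local, and passing sample_file_copy_paths rather than sample_file_metadata_paths to check_tool_input_params), most of this diff is a re-indent of get_sample_files_from_disk. Its job: walk a repository that may have been cloned to a scratch directory, collect *.sample files while skipping mercurial metadata, and strip the clone prefix so the recorded paths stay repository-relative. A compact sketch under those assumptions (the function name and clone_prefix parameter are illustrative):

import os

def find_sample_files( repo_dir, clone_prefix=None ):
    # Collect *.sample files, skipping anything under .hg, the way
    # get_sample_files_from_disk() does.
    sample_paths = []
    for root, dirs, files in os.walk( repo_dir ):
        if root.find( '.hg' ) >= 0:
            continue
        for name in files:
            if not name.endswith( '.sample' ):
                continue
            full_path = os.path.join( root, name )
            if clone_prefix and full_path.startswith( clone_prefix ):
                # Strip the temporary clone location so the recorded
                # path is relative to the repository itself.
                full_path = full_path[ len( clone_prefix ): ].lstrip( '/' )
            sample_paths.append( full_path )
    return sample_paths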
commit/galaxy-central: inithello: Functional tests for repositories with circular repository dependencies.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1cdb5f5eb6d0/
changeset: 1cdb5f5eb6d0
user: inithello
date: 2012-12-11 16:47:31
summary: Functional tests for repositories with circular repository dependencies.
affected #: 4 files
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -50,6 +50,9 @@
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision ):
+ strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username, depends_on_changeset_revision ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
def check_repository_metadata( self, repository, tip_only=True ):
if tip_only:
assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -353,7 +356,7 @@
**kwd ):
self.visit_url( '/upload/upload?repository_id=%s' % self.security.encode_id( repository.id ) )
if valid_tools_only:
- strings_displayed.append( "has been successfully uploaded to the repository." )
+ strings_displayed.extend( [ 'has been successfully', 'uploaded to the repository.' ] )
for key in kwd:
tc.fv( "1", key, kwd[ key ] )
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -18,7 +18,7 @@
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % regular_email
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
test_user_1_private_role = get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
@@ -28,7 +28,7 @@
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
- def test_0005_create_repositories( self ):
+ def test_0010_create_repositories( self ):
'''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
@@ -69,7 +69,7 @@
strings_displayed=[] )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
- def test_0010_generate_repository_dependencies_for_emboss_5( self ):
+ def test_0015_generate_repository_dependencies_for_emboss_5( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
@@ -80,7 +80,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_depepndencies.xml.' )
- def test_0015_generate_repository_dependencies_for_emboss_6( self ):
+ def test_0020_generate_repository_dependencies_for_emboss_6( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
@@ -88,7 +88,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_depepndencies.xml.' )
- def test_0020_generate_repository_dependency_on_emboss_5( self ):
+ def test_0025_generate_repository_dependency_on_emboss_5( self ):
'''Create and upload repository_dependencies.xml for the emboss_5_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
@@ -100,7 +100,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_5' )
- def test_0025_generate_repository_dependency_on_emboss_6( self ):
+ def test_0030_generate_repository_dependency_on_emboss_6( self ):
'''Create and upload repository_dependencies.xml for the emboss_6_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
@@ -112,7 +112,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_6' )
- def test_0030_verify_repository_dependency_revisions( self ):
+ def test_0035_verify_repository_dependency_revisions( self ):
'''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -0,0 +1,102 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+
+freebayes_repository_name = 'freebayes_0040'
+freebayes_repository_name_description = "Galaxy's freebayes tool"
+freebayes_repository_name_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0040'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( 'test_0040_repository_circular_dependencies', 'Testing handling of circular repository dependencies.' )
+ def test_0010_create_freebayes_repository_name( self ):
+ '''Create and populate freebayes_0040.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( freebayes_repository_name,
+ freebayes_repository_name_description,
+ repository_long_description=freebayes_repository_name_long_description,
+ categories=[ 'test_0040_repository_circular_dependencies' ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ def test_0015_create_filtering_repository( self ):
+ '''Create and populate filtering_0040.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( filtering_repository_name,
+ filtering_repository_description,
+ repository_long_description=filtering_repository_long_description,
+ categories=[ 'test_0040_repository_circular_dependencies' ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ def test_0020_create_dependency_on_freebayes( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of freebayes to the filtering_0040 repository.'''
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
+ self.upload_file( filtering_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on freebayes' )
+ def test_0025_create_dependency_on_filtering( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of filtering to the freebayes_0040 repository.'''
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
+ self.generate_repository_dependency_xml( repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Freebayes depends on the filtering repository.' )
+ self.upload_file( freebayes_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering' )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that each repository can depend on the other without causing an infinite loop.'''
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ # In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
+ filtering_strings_displayed = [ freebayes_repository_name, common.test_user_1_name, self.get_repository_tip( freebayes_repository ) ]
+ freebayes_strings_displayed = [ filtering_repository_name, common.test_user_1_name, self.get_repository_tip( filtering_repository ) ]
+ self.display_manage_repository_page( filtering_repository, strings_displayed=filtering_strings_displayed )
+ self.display_manage_repository_page( freebayes_repository, strings_displayed=freebayes_strings_displayed )
+
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/test_data/freebayes/freebayes.tar
Binary file test/tool_shed/test_data/freebayes/freebayes.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
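The new test module builds its circular dependency by generating a repository_dependencies.xml file for each repository and uploading it to the other one. The file format is a <repositories> root holding one <repository> element per dependency, with toolshed, name, owner, and changeset_revision attributes. A minimal generator is sketched below; the tool shed URL, owner, and revision hash are placeholder values, not data from the test run.

from xml.etree import ElementTree as ET

def write_repository_dependencies_xml( path, toolshed, name, owner, changeset_revision, description ):
    # One <repository> element per dependency; the tool shed reads
    # these attributes when it sets repository metadata.
    root = ET.Element( 'repositories', { 'description': description } )
    ET.SubElement( root, 'repository', { 'toolshed': toolshed,
                                         'name': name,
                                         'owner': owner,
                                         'changeset_revision': changeset_revision } )
    ET.ElementTree( root ).write( path )

write_repository_dependencies_xml( 'repository_dependencies.xml',
                                   'http://localhost:9009',   # placeholder tool shed URL
                                   'freebayes_0040',
                                   'user1',                   # placeholder owner
                                   '0123456789ab',            # placeholder changeset hash
                                   'Filtering 1.1.0 depends on the freebayes repository.' )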
commit/galaxy-central: carlfeberhard: Fixes to history functional tests; Twilltestcase: added function to parse and check json, exact string matcher
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/795a1799e7dc/
changeset: 795a1799e7dc
user: carlfeberhard
date: 2012-12-10 23:44:11
summary: Fixes to history functional tests; Twilltestcase: added function to parse and check json, exact string matcher
affected #: 5 files
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 lib/galaxy/webapps/galaxy/controllers/root.py
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -130,6 +130,7 @@
history_panel_template = "root/history.mako"
else:
+ # get all datasets server-side, client-side will get flags and render appropriately
datasets = self.get_history_datasets( trans, history,
show_deleted=True, show_hidden=True, show_purged=True )
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -241,8 +241,9 @@
try:
os.makedirs( dir )
except OSError:
- pass
- print "Database connection:", database_connection
+ pass
+ log.info( "Database connection:", database_connection )
+
# ---- Build Application --------------------------------------------------
app = None
if start_server:
@@ -412,6 +413,8 @@
if os.path.exists( tempdir ) and 'GALAXY_TEST_NO_CLEANUP' not in os.environ:
log.info( "Cleaning up temporary files in %s" % tempdir )
shutil.rmtree( tempdir )
+ else:
+ log.info( "GALAXY_TEST_NO_CLEANUP is on. Temporary files in %s" % tempdir )
except:
pass
if psu_production and 'GALAXY_TEST_NO_CLEANUP' not in os.environ:
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -310,11 +310,12 @@
// ostensibly, this is the App
// LOAD INITIAL DATA IN THIS PAGE - since we're already sending it...
// ...use mako to 'bootstrap' the models
- var user = ${ get_current_user() },
+ var page_show_deleted = ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
+ page_show_hidden = ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) },
+
+ user = ${ get_current_user() },
history = ${ get_history( history.id ) },
hdas = ${ get_hdas( history.id, datasets ) };
- var currUser = new User( user );
- if( !Galaxy.currUser ){ Galaxy.currUser = currUser; }
// add user data to history
// i don't like this history+user relationship, but user authentication changes views/behaviour
@@ -326,8 +327,8 @@
urlTemplates : galaxy_paths.attributes,
logger : ( debugging )?( console ):( null ),
// is page sending in show settings? if so override history's
- show_deleted : ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
- show_hidden : ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) }
+ show_deleted : page_show_deleted,
+ show_hidden : page_show_hidden
});
historyPanel.render();
@@ -337,15 +338,16 @@
// urlTemplates : galaxy_paths.attributes,
// logger : ( debugging )?( console ):( null ),
// // is page sending in show settings? if so override history's
- // show_deleted : ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
- // show_hidden : ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) }
+ // show_deleted : page_show_deleted,
+ // show_hidden : page_show_hidden
//});
//historyPanel.model.loadFromApi( history.id );
// set it up to be accessible across iframes
//TODO:?? mem leak
top.Galaxy.currHistoryPanel = historyPanel;
-
+ var currUser = new User( user );
+ if( !Galaxy.currUser ){ Galaxy.currUser = currUser; }
// QUOTA METER is a cross-frame ui element (meter in masthead, over quota message in history)
// create it and join them here for now (via events)
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -249,8 +249,9 @@
page = self.last_page()
if page.find( 'error' ) > -1:
raise AssertionError('Errors in the history for user %s' % self.user )
+
def check_history_for_string( self, patt, show_deleted=False ):
- """Looks for 'string' in history page"""
+ """Breaks patt on whitespace and searches for each element seperately in the history"""
self.home()
if show_deleted:
self.visit_page( "history?show_deleted=True" )
@@ -264,11 +265,57 @@
errmsg = "no match to '%s'\npage content written to '%s'" % ( subpatt, fname )
raise AssertionError( errmsg )
self.home()
+
+ def check_history_for_exact_string( self, string, show_deleted=False ):
+ """Looks for exact match to 'string' in history page"""
+ self.home()
+ if show_deleted:
+ self.visit_page( "history?show_deleted=True" )
+ else:
+ self.visit_page( "history" )
+ try:
+ tc.find( string )
+ except:
+ fname = self.write_temp_file( tc.browser.get_html() )
+ errmsg = "no match to '%s'\npage content written to '%s'" % ( string, fname )
+ raise AssertionError( errmsg )
+ self.home()
+
+ def check_history_json( self, pattern, check_fn, show_deleted=None, multiline=True ):
+ """
+ Tries to find a JSON string in the history page using the regex pattern,
+ parse it, and assert check_fn returns True when called on that parsed
+ data.
+ """
+ self.home()
+ if show_deleted:
+ self.visit_page( "history?show_deleted=True" )
+ elif show_deleted == False:
+ self.visit_page( "history?show_deleted=False" )
+ else:
+ self.visit_page( "history" )
+ try:
+ tc.find( pattern, flags=( 'm' if multiline else '' ) )
+ # twill stores the regex match in a special stack variable
+ match = twill.namespaces.get_twill_glocals()[1][ '__match__' ]
+ json_data = from_json_string( match )
+ assert check_fn( json_data ), 'failed check_fn'
+
+ except Exception, exc:
+ log.error( exc, exc_info=True )
+ fname = self.write_temp_file( tc.browser.get_html() )
+ errmsg = ( "json '%s' could not be found or failed check_fn" % ( pattern ) +
+ "\npage content written to '%s'" % ( fname ) )
+ raise AssertionError( errmsg )
+
+ self.home()
+
def clear_history( self ):
"""Empties a history of all datasets"""
self.visit_page( "clear_history" )
self.check_history_for_string( 'Your history is empty' )
self.home()
+
def delete_history( self, id ):
"""Deletes one or more histories"""
history_list = self.get_histories_as_data_list()
@@ -279,6 +326,7 @@
check_str = 'Deleted %d %s' % ( num_deleted, iff( num_deleted != 1, "histories", "history" ) )
self.check_page_for_string( check_str )
self.home()
+
def delete_current_history( self, strings_displayed=[] ):
"""Deletes the current history"""
self.home()
@@ -286,16 +334,19 @@
for check_str in strings_displayed:
self.check_page_for_string( check_str )
self.home()
+
def get_histories_as_data_list( self ):
"""Returns the data elements of all histories"""
tree = self.histories_as_xml_tree()
data_list = [ elem for elem in tree.findall("data") ]
return data_list
+
def get_history_as_data_list( self, show_deleted=False ):
"""Returns the data elements of a history"""
tree = self.history_as_xml_tree( show_deleted=show_deleted )
data_list = [ elem for elem in tree.findall("data") ]
return data_list
+
def history_as_xml_tree( self, show_deleted=False ):
"""Returns a parsed xml object of a history"""
self.home()
@@ -303,6 +354,7 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
+
def histories_as_xml_tree( self ):
"""Returns a parsed xml object of all histories"""
self.home()
@@ -310,6 +362,7 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
+
def history_options( self, user=False, active_datasets=False, activatable_datasets=False, histories_shared_by_others=False ):
"""Mimics user clicking on history options link"""
self.home()
@@ -329,6 +382,7 @@
self.check_page_for_string( 'Rename</a> current history' )
self.check_page_for_string( 'Delete</a> current history' )
self.home()
+
def new_history( self, name=None ):
"""Creates a new, empty history"""
self.home()
@@ -338,6 +392,7 @@
self.visit_url( "%s/history_new" % self.url )
self.check_history_for_string('Your history is empty')
self.home()
+
def rename_history( self, id, old_name, new_name ):
"""Rename an existing history"""
self.home()
@@ -345,6 +400,7 @@
check_str = 'History: %s renamed to: %s' % ( old_name, urllib.unquote( new_name ) )
self.check_page_for_string( check_str )
self.home()
+
def set_history( self ):
"""Sets the history (stores the cookies for this run)"""
if self.history_id:
@@ -353,6 +409,7 @@
else:
self.new_history()
self.home()
+
def share_current_history( self, email, strings_displayed=[], strings_displayed_after_submit=[],
action='', action_strings_displayed=[], action_strings_displayed_after_submit=[] ):
"""Share the current history with different users"""
@@ -372,6 +429,7 @@
for check_str in action_strings_displayed_after_submit:
self.check_page_for_string( check_str )
self.home()
+
def share_histories_with_users( self, ids, emails, strings_displayed=[], strings_displayed_after_submit=[],
action=None, action_strings_displayed=[] ):
"""Share one or more histories with one or more different users"""
@@ -389,6 +447,7 @@
for check_str in action_strings_displayed:
self.check_page_for_string( check_str )
self.home()
+
def unshare_history( self, history_id, user_id, strings_displayed=[] ):
"""Unshare a history that has been shared with another user"""
self.visit_url( "%s/history/list?id=%s&operation=share+or+publish" % ( self.url, history_id ) )
@@ -396,12 +455,14 @@
self.check_page_for_string( check_str )
self.visit_url( "%s/history/sharing?unshare_user=%s&id=%s" % ( self.url, user_id, history_id ) )
self.home()
+
def switch_history( self, id='', name='' ):
"""Switches to a history in the current list of histories"""
self.visit_url( "%s/history/list?operation=switch&id=%s" % ( self.url, id ) )
if name:
- self.check_history_for_string( escape( name ) )
+ self.check_history_for_exact_string( name )
self.home()
+
def view_stored_active_histories( self, strings_displayed=[] ):
self.home()
self.visit_page( "history/list" )
@@ -698,11 +759,13 @@
# if the server's env has GALAXY_TEST_SAVE, save the output file to that dir
if self.keepOutdir:
ofn = os.path.join( self.keepOutdir, os.path.basename( local_name ) )
+ log.debug( 'keepoutdir: %s, ofn: %s', self.keepOutdir, ofn )
try:
shutil.copy( temp_name, ofn )
except Exception, exc:
error_log_msg = ( 'TwillTestCase could not save output file %s to %s: ' % ( temp_name, ofn ) )
error_log_msg += str( exc )
+ log.error( error_log_msg, exc_info=True )
else:
log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % ( self.keepOutdir, ofn ) )
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -250,8 +250,7 @@
sa_session.query( galaxy.model.HistoryDatasetAssociation )
.filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
- .first()
- )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
self.delete_history_item( str( hda_2_bed.id ) )
@@ -260,8 +259,7 @@
sa_session.query( galaxy.model.HistoryDatasetAssociation )
.filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) )
- .first()
- )
+ .first() )
assert hda_3_bed is not None, "Problem retrieving hda_3_bed from database"
self.delete_history_item( str( hda_3_bed.id ) )
@@ -281,39 +279,52 @@
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history3.name ] )
# Switch to the cloned history to make sure activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone2.id ), name=history3_clone2.name )
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
- hda_3_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) ) \
- .first()
+ hda_3_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) )
+ .first() )
assert hda_3_bed is not None, "Problem retrieving hda_3_bed from database"
+
# Make sure the deleted datasets are included in the cloned history
- check_str = 'This dataset has been deleted. Click undelete id=%d' % hda_2_bed.id
- self.check_history_for_string( check_str, show_deleted=True )
- check_str = 'This dataset has been deleted. Click undelete id=%d' % hda_3_bed.id
- self.check_history_for_string( check_str, show_deleted=True )
+ # check for encoded ids
+ # - these will be available bc the refreshed page will have bootstrapped json for the hdas
+ #NOTE: that these WON'T be available when refreshes become less common
+ # (when the backbone.js is fully integrated and refreshes aren't used after every history function)
+ self.check_history_for_exact_string( self.security.encode_id( hda_2_bed.id ), show_deleted=True )
+ self.check_history_for_exact_string( self.security.encode_id( hda_3_bed.id ), show_deleted=True )
+
# Test cloning only active datasets
- self.clone_history( self.security.encode_id( history3.id ),
- 'active',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ self.clone_history(
+ self.security.encode_id( history3.id ),
+ 'active',
+ strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history3_clone3
- history3_clone3 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history3_clone3 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first()
+ )
assert history3_clone3 is not None, "Problem retrieving history3_clone3 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed = ["Clone of '%s'" % history3.name ] )
- # Switch to the cloned history to make sure activatable datasets were cloned
+
+ # Switch to the cloned history to make sure ONLY activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone3.id ) )
# Make sure the deleted datasets are NOT included in the cloned history
+ # - again using the bootstrapped json for the hdas
try:
- self.check_history_for_string( 'This dataset has been deleted.', show_deleted=True )
+ self.check_history_for_exact_string( '"deleted": true', show_deleted=True )
+ #self.check_history_for_string( 'This dataset has been deleted.', show_deleted=True )
raise AssertionError, "Deleted datasets incorrectly included in cloned history history3_clone3"
except:
pass
@@ -349,6 +360,7 @@
# Shared history3 should be in regular_user3's list of shared histories
self.view_shared_histories( cstrings_displayed=[ history3.name, admin_user.email ] )
"""
+
def test_045_change_permissions_on_current_history( self ):
"""Testing changing permissions on the current history"""
# Logged in as regular_user3
@@ -402,6 +414,7 @@
current_history_permissions.sort()
if current_history_permissions != history5_default_permissions:
raise AssertionError, "With logout and login, the history default permissions are not preserved"
+
def test_050_sharing_restricted_history_by_making_datasets_public( self ):
"""Testing sharing a restricted history by making the datasets public"""
# Logged in as admin_user
@@ -432,6 +445,7 @@
self.check_history_for_string( 'chr1' )
self.logout()
self.login( email=admin_user.email )
+
def test_055_sharing_restricted_history_by_making_new_sharing_role( self ):
"""Testing sharing a restricted history by associating a new sharing role with protected datasets"""
# At this point, history5 should have 1 item, 1.bed, which is public. We'll add another
@@ -506,6 +520,7 @@
self.display_history_item( str( hda_2_bed.id ), strings_displayed=[ 'chr1' ] )
# Delete the clone so the next test will be valid
self.delete_history( id=self.security.encode_id( history5_clone2.id ) )
+
def test_060_sharing_restricted_history_with_multiple_users_by_changing_no_permissions( self ):
"""Testing sharing a restricted history with multiple users, making no permission changes"""
# Logged in as regular_user2
@@ -515,10 +530,12 @@
# regular_user2 should be able to access history5's 2.bed dataset since it is associated with a
# sharing role, and regular_user3 should be able to access history5's 1.bed, but not 2.bed even
# though they can see it in their shared history.
+
# We first need to unshare history5 from regular_user2 so that we can re-share it.
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user2.id ),
strings_displayed=[ regular_user1.email, regular_user2.email ] )
+
# Make sure the history was unshared correctly
self.logout()
self.login( email=regular_user2.email )
@@ -528,11 +545,14 @@
raise AssertionError, "history5 still shared with regular_user2 after unsharing it with that user."
except:
pass
+
self.logout()
self.login( admin_user.email )
email = '%s,%s' % ( regular_user2.email, regular_user3.email )
- strings_displayed_after_submit = [ 'The following datasets can be shared with %s with no changes' % email,
- 'The following datasets can be shared with %s by updating their permissions' % email ]
+ strings_displayed_after_submit = [
+ 'The following datasets can be shared with %s with no changes' % email,
+ 'The following datasets can be shared with %s by updating their permissions' % email ]
+
# history5 will be shared with regular_user1, regular_user2 and regular_user3
self.share_current_history( email,
strings_displayed_after_submit=strings_displayed_after_submit,
@@ -547,30 +567,35 @@
'activatable',
strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history5_clone3
- history5_clone3 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user2.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history5_clone3 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==regular_user2.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert history5_clone3 is not None, "Problem retrieving history5_clone3 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone3.id ), name=history5_clone3.name )
- # Make sure both datasets are in the history
+ # Make sure both datasets are in the history
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
- hda_1_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
- .first()
+ hda_1_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) )
+ .first() )
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
+
# Make sure 1.bed is accessible since it is public
self.display_history_item( str( hda_1_bed.id ), strings_displayed=[ 'chr1' ] )
# Make sure 2.bed is accessible since it is associated with a sharing role
@@ -582,34 +607,39 @@
self.login( email=regular_user3.email )
# Shared history5 should be in regular_user2's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
+
# Clone restricted history5
self.clone_history( self.security.encode_id( history5.id ),
'activatable',
strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history5_clone4
- history5_clone4 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user3.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history5_clone4 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==regular_user3.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert history5_clone4 is not None, "Problem retrieving history5_clone4 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone4.id ), name=history5_clone4.name )
- # Make sure both datasets are in the history
+ # Make sure both datasets are in the history
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
- hda_1_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
- .first()
+ hda_1_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) )
+ .first() )
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
# Make sure 1.bed is accessible since it is public
self.display_history_item( str( hda_1_bed.id ), strings_displayed=[ 'chr1' ] )
@@ -619,20 +649,31 @@
raise AssertionError, "History item 2.bed is accessible by user %s when is should not be" % regular_user3.email
except:
pass
- self.check_history_for_string( 'You do not have permission to view this dataset' )
+
+ # check the history page json for hda_2_bed and if it's accessible
+ def hda_2_bed_is_inaccessible( hda_list ):
+ for hda in hda_list:
+ if hda[ 'id' ] == self.security.encode_id( hda_2_bed.id ):
+ return ( not hda[ 'accessible' ] )
+ return False
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', hda_2_bed_is_inaccessible )
+
# Admin users can view all datasets ( using the history/view feature ), so make sure 2.bed is accessible to the admin
self.logout()
self.login( email=admin_user.email )
self.view_history( str( hda_2_bed.history_id ), strings_displayed=[ '<td>NM_005997_cds_0_0_chr1_147962193_r</td>' ] )
self.logout()
self.login( email=regular_user3.email )
+
# Delete the clone so the next test will be valid
self.delete_history( id=self.security.encode_id( history5_clone4.id ) )
+
def test_065_sharing_private_history_by_choosing_to_not_share( self ):
"""Testing sharing a restricted history with multiple users by choosing not to share"""
- # Logged in as regular_user3
+ # Logged in as regular_user3 - login as admin
self.logout()
self.login( email=admin_user.email )
+
# Unshare history5 from regular_user2
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user2.id ),
@@ -641,7 +682,8 @@
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user3.id ),
strings_displayed=[ regular_user1.email, regular_user3.email ] )
- # Make sure the history was unshared correctly
+
+ # Make sure the histories were unshared correctly
self.logout()
self.login( email=regular_user2.email )
self.visit_page( "root/history_options" )
@@ -650,6 +692,7 @@
raise AssertionError, "history5 still shared with regular_user2 after unshaing it with that user."
except:
pass
+
self.logout()
self.login( email=regular_user3.email )
self.visit_page( "root/history_options" )
@@ -660,42 +703,73 @@
pass
self.logout()
self.login( email=admin_user.email )
+
def test_070_history_show_and_hide_deleted_datasets( self ):
"""Testing displaying deleted history items"""
+ #NOTE: due to the new client-side rendering of the history, this test isn't very apt
+ # (a) searching for strings in the dom doesn't work (they won't be twill's html) and
+ # (b) all datasets are included in the bootstrapped hda json regardless of the show_deleted setting
+ #CE: for now, I'm changing this to simply check whether the show_deleted flag
+ # is being properly passed to the history control
+ #TODO: this test needs to be moved to client-side testing framework (selenium or other)
+
# Logged in as admin_user
+ # create a new history and upload a new hda (1.bed) into it
self.new_history( name=urllib.quote( 'show hide deleted datasets' ) )
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ latest_history = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert latest_history is not None, "Problem retrieving latest_history from database"
self.upload_file('1.bed', dbkey='hg18')
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
+ latest_hda = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) )
+ .first() )
+
+ # delete that item and make sure the 'history empty' message shows
self.home()
+ log.info( 'deleting last hda' )
self.delete_history_item( str( latest_hda.id ) )
- self.check_history_for_string( 'Your history is empty' )
+ # check the historyPanel settings.show_deleted for a null json value (no show_deleted in query string)
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == None )
+
+ # reload this history with the show_deleted flag set in the query string
+ # the deleted dataset should be there with the proper 'deleted' text
self.home()
- self.visit_url( "%s/history/?show_deleted=True" % self.url )
- self.check_page_for_string( 'This dataset has been deleted.' )
- self.check_page_for_string( '1.bed' )
+ log.info( 'turning show_deleted on' )
+ #self.visit_url( "%s/history/?show_deleted=True" % self.url )
+ # check the historyPanel settings.show_deleted for a true json value
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == True, show_deleted=True )
+
+ # reload this history again with the show_deleted flag set TO FALSE in the query string
+ # make sure the 'history empty' message shows
self.home()
- self.visit_url( "%s/history/?show_deleted=False" % self.url )
- self.check_page_for_string( 'Your history is empty' )
+ log.info( 'turning show_deleted off' )
+ #self.visit_url( "%s/history/?show_deleted=False" % self.url )
+ # check the historyPanel settings.show_deleted for a false json value
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == False, show_deleted=False )
+
+ # delete this history
self.delete_history( self.security.encode_id( latest_history.id ) )
+
def test_075_deleting_and_undeleting_history_items( self ):
"""Testing deleting and un-deleting history items"""
# logged in as admin_user
+
# Deleting the current history in the last method created a new history
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ latest_history = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert latest_history is not None, "Problem retrieving latest_history from database"
- self.rename_history( self.security.encode_id( latest_history.id ), latest_history.name, new_name=urllib.quote( 'delete undelete history items' ) )
+
+ self.rename_history( self.security.encode_id( latest_history.id ),
+ latest_history.name, new_name=urllib.quote( 'delete undelete history items' ) )
# Add a new history item
self.upload_file( '1.bed', dbkey='hg15' )
latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
@@ -722,6 +796,7 @@
self.check_page_for_string( '1.bed' )
self.check_page_for_string( 'hg15' )
self.delete_history( self.security.encode_id( latest_history.id ) )
+
def test_080_copying_history_items_between_histories( self ):
"""Testing copying history items between histories"""
# logged in as admin_user
@@ -776,6 +851,7 @@
self.check_history_for_string( hda1.name )
self.delete_history( self.security.encode_id( history6.id ) )
self.delete_history( self.security.encode_id( history7.id ) )
+
def test_085_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to to be valid"""
# logged in as admin_user
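[Editor's note: the check_history_json helper called in the diff above is not shown in this changeset. One plausible shape, inferred purely from its call sites (a regex with one capture group, a predicate, and an optional show_deleted kwarg) and assuming the base class's visit_url and last_page helpers, might be:

    import re
    from galaxy.util.json import from_json_string

    def check_history_json( self, pattern, check_fn, show_deleted=None ):
        # Load the history page, optionally passing show_deleted in the query string.
        url = '%s/history/' % self.url
        if show_deleted is not None:
            url += '?show_deleted=%s' % show_deleted
        self.visit_url( url )
        # Extract the JSON value captured by the pattern and test it with the predicate.
        match = re.search( pattern, self.last_page() )
        assert match is not None, "Pattern %s not found in history page" % pattern
        value = from_json_string( match.group( 1 ) )
        assert check_fn( value ), "Predicate failed for history JSON value %r" % value

This is a hypothetical reconstruction, not the committed implementation.]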
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Handle circular repository dependencies to "n" levels of depth.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e19bf2b11763/
changeset: e19bf2b11763
user: greg
date: 2012-12-10 22:27:29
summary: Handle circular repository dependencies to "n" levels of depth.
affected #: 3 files
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,8 +1,6 @@
import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from galaxy.datatypes.checkers import *
-from galaxy.util.json import *
-from galaxy.util.shed_util_common import *
+from shed_util_common import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
from galaxy.tool_shed.encoding_util import *
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -194,28 +194,18 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
-def can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
- """
- Handle circular repository dependencies that could result in an infinite loop by determining if it is safe to add an entry to the
- repository dependencies container.
- """
- # First check for an exact match - if this is true, the changeset revision was not updated.
- repository_dependency_as_key = container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
- repository_dependency[ 1 ],
- repository_dependency[ 2 ],
- repository_dependency[ 3] )
- current_repository_key_as_repository_dependency = current_repository_key.split( container_util.STRSEP )
- if repository_dependency_as_key in all_repository_dependencies:
- val = all_repository_dependencies[ repository_dependency_as_key ]
- if current_repository_key_as_repository_dependency in val:
- return False
- # Now handle the case where an update to the changeset revision was done, so everything will match except the changeset_revision.
- repository_dependency_as_partial_key = container_util.STRSEP.join( [ repository_dependency[ 0 ], repository_dependency[ 1 ], repository_dependency[ 2 ] ] )
- for key in all_repository_dependencies:
- if key.startswith( repository_dependency_as_partial_key ):
- val = all_repository_dependencies[ key ]
- if current_repository_key_as_repository_dependency in val:
- return False
+def can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ """Handle the case where an update to the changeset revision was done."""
+ k = key_rd_dict.keys()[ 0 ]
+ rd = key_rd_dict[ k ]
+ partial_rd = rd[ 0:3 ]
+ for kr_dict in key_rd_dicts:
+ key = kr_dict.keys()[ 0 ]
+ if key == k:
+ val = kr_dict[ key ]
+ for repository_dependency in val:
+ if repository_dependency[ 0:3 ] == partial_rd:
+ return False
return True
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
@@ -555,13 +545,15 @@
metadata = repository_metadata.metadata
if metadata:
# Get a dictionary of all repositories upon which the contents of the received repository depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
- all_repository_dependencies=None )
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
# Cast unicode to string.
repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
str( repository_clone_url ),
@@ -1037,6 +1029,11 @@
if ctx_file_name == stripped_filename:
return manifest_ctx, ctx_file
return None, None
+def get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata ):
+ return container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=repository_metadata.changeset_revision )
def get_file_context_from_ctx( ctx, filename ):
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
# within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
@@ -1197,123 +1194,68 @@
.filter( and_( trans.model.Repository.table.c.name == name,
trans.model.Repository.table.c.user_id == user.id ) ) \
.first()
-def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url, repository_dependencies=None,
- all_repository_dependencies=None, handled=None ):
+def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url,
+ key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
+ handled_key_rd_dicts=None, circular_repository_dependencies=None ):
"""
Return a dictionary of all repositories upon which the contents of the received repository_metadata record depend. The dictionary keys
are name-spaced values consisting of toolshed_base_url/repository_name/repository_owner/changeset_revision and the values are lists of
repository_dependency tuples consisting of ( toolshed_base_url, repository_name, repository_owner, changeset_revision ). This method
ensures that all required repositories to the nth degree are returned.
"""
- if handled is None:
- handled = []
+ if handled_key_rd_dicts is None:
+ handled_key_rd_dicts = []
if all_repository_dependencies is None:
all_repository_dependencies = {}
- if repository_dependencies is None:
- repository_dependencies = []
+ if key_rd_dicts_to_be_processed is None:
+ key_rd_dicts_to_be_processed = []
+ if circular_repository_dependencies is None:
+ circular_repository_dependencies = []
+ # Assume the current repository does not have repository dependencies defined for it.
+ current_repository_key = None
metadata = repository_metadata.metadata
if metadata and 'repository_dependencies' in metadata:
+ current_repository_key = get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata )
repository_dependencies_dict = metadata[ 'repository_dependencies' ]
- # The repository_dependencies entry in the metadata is a dictionary that may have a value for a 'description' key. We want to
- # store the value of this key only once, the first time through this recursive method.
- current_repository_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=repository_metadata.changeset_revision )
if not all_repository_dependencies:
- # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
- all_repository_dependencies[ 'root_key' ] = current_repository_key
- all_repository_dependencies[ current_repository_key ] = []
- if 'description' not in all_repository_dependencies:
- description = repository_dependencies_dict.get( 'description', None )
- all_repository_dependencies[ 'description' ] = description
- # The next key of interest in repository_dependencies_dict is 'repository_dependencies', which is a list of tuples.
- repository_dependencies_tups = repository_dependencies_dict[ 'repository_dependencies' ]
- if repository_dependencies_tups and current_repository_key:
- # Remove all repository dependencies that point to a revision within its own repository.
- repository_dependencies_tups = remove_ropository_dependency_reference_to_self( repository_dependencies_tups, current_repository_key )
- for repository_dependency in repository_dependencies_tups:
- if repository_dependency not in handled and repository_dependency not in repository_dependencies:
- # The following if statement handles repositories dependencies that are circular in nature.
- if current_repository_key:
- if current_repository_key in all_repository_dependencies:
- # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies.
- all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
- if repository_dependency not in all_repository_dependencies_val:
- all_repository_dependencies_val.append( repository_dependency )
- all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
- elif can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
- # We don't have a circular dependency that could result in an infinite loop.
- all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
- repository_dependencies.append( repository_dependency )
- else:
- # The current repository does not have repository dependencies defined for it.
- current_repository_key = None
- # The following if statement handles repositories dependencies that are circular in nature.
- if current_repository_key and current_repository_key in all_repository_dependencies:
- repository_dependencies_tups = [ rd for rd in all_repository_dependencies[ current_repository_key ] ]
- if repository_dependencies_tups:
- repository_dependency = repository_dependencies_tups.pop( 0 )
- if repository_dependency not in handled:
- handled.append( repository_dependency )
- if repository_dependency in repository_dependencies:
- repository_dependencies.remove( repository_dependency )
- toolshed, name, owner, changeset_revision = repository_dependency
- if tool_shed_is_this_tool_shed( toolshed ):
- required_repository = get_repository_by_name_and_owner( trans, name, owner )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- changeset_revision )
- if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- else:
- # The repository changeset_revision is no longer installable, so see if there's been an update.
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- required_changeset_revision )
- if required_repository_metadata:
- # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
- # entries in our all_repository_dependencies dictionary.
- updated_repository_dependency = [ toolshed, name, owner, required_changeset_revision ]
- for k, v in all_repository_dependencies.items():
- if k in [ 'root_key', 'description' ]:
- continue
- for i, current_repository_dependency in enumerate( v ):
- cts, cn, co, ccr = current_repository_dependency
- if toolshed == cts and name == cn and owner == co and changeset_revision == ccr:
- if updated_repository_dependency in v:
- # We've already stored the updated repository_dependency, so remove the outdated one.
- v = v.remove( repository_dependency )
- all_repository_dependencies[ k ] = v
- else:
- # Store the updated repository_dependency.
- v[ i ] = updated_repository_dependency
- all_repository_dependencies[ k ] = v
- if required_repository_metadata:
- # The required_repository_metadata changeset_revision is installable.
- required_metadata = required_repository_metadata.metadata
- if required_metadata:
- for repository_dependency in repository_dependencies_tups:
- if repository_dependency not in repository_dependencies:
- repository_dependencies.append( repository_dependency )
- return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=toolshed,
- repository_dependencies=repository_dependencies,
- all_repository_dependencies=all_repository_dependencies,
- handled=handled )
- else:
- # The repository is in a different tool shed, so build an url and send a request.
- error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
- error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
- log.debug( error_message )
+ all_repository_dependencies = initialize_all_repository_dependencies( current_repository_key,
+ repository_dependencies_dict,
+ all_repository_dependencies )
+ # Handle the repository dependencies defined in the current repository, if any, and populate the various repository dependency objects for
+ # this round of processing.
+ current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies = \
+ populate_repository_dependency_objects_for_processing( trans,
+ current_repository_key,
+ repository_dependencies_dict,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies,
+ all_repository_dependencies )
+ if current_repository_key:
+ if current_repository_key_rd_dicts:
+ # There should be only a single current_repository_key_rd_dict in this list.
+ current_repository_key_rd_dict = current_repository_key_rd_dicts[ 0 ]
+ # Handle circular repository dependencies.
+ if not in_circular_repository_dependencies( current_repository_key_rd_dict, circular_repository_dependencies ):
+ if current_repository_key in all_repository_dependencies:
+ handle_current_repository_dependency( trans,
+ current_repository_key,
+ key_rd_dicts_to_be_processed,
+ all_repository_dependencies,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
return all_repository_dependencies
+def get_repository_dependency_as_key( repository_dependency ):
+ return container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
+ repository_dependency[ 1 ],
+ repository_dependency[ 2 ],
+ repository_dependency[ 3 ] )
def get_repository_file_contents( file_path ):
if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
@@ -1415,11 +1357,77 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
+ updated_key_rd_dicts = []
+ for key_rd_dict in key_rd_dicts:
+ key = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ key ]
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # The repository changeset_revision is installable, so no updates are available.
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ updated_key_rd_dicts.append( new_key_rd_dict )
+ else:
+ # The repository changeset_revision is no longer installable, so see if there's been an update.
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = [ toolshed, name, owner, repository_metadata.changeset_revision ]
+ # We have the updated changeset revision.
+ updated_key_rd_dicts.append( new_key_rd_dict )
+ return updated_key_rd_dicts
def get_user_by_username( trans, username ):
"""Get a user from the database by username"""
return trans.sa_session.query( trans.model.User ) \
.filter( trans.model.User.table.c.username == username ) \
.one()
+def handle_circular_repository_dependency( repository_key, repository_dependency, circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies ):
+ all_repository_dependencies_root_key = all_repository_dependencies[ 'root_key' ]
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ update_circular_repository_dependencies( repository_key,
+ repository_dependency,
+ all_repository_dependencies[ repository_dependency_as_key ],
+ circular_repository_dependencies )
+ if all_repository_dependencies_root_key != repository_dependency_as_key:
+ all_repository_dependencies[ repository_key ] = [ repository_dependency ]
+ return circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies
+def handle_current_repository_dependency( trans, current_repository_key, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts,
+ circular_repository_dependencies ):
+ current_repository_key_rd_dicts = []
+ for rd in all_repository_dependencies[ current_repository_key ]:
+ rd_copy = [ str( item ) for item in rd ]
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd_copy
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts:
+ toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ handle_key_rd_dicts_for_repository( trans,
+ current_repository_key,
+ current_repository_key_rd_dicts,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
+ all_repository_dependencies=all_repository_dependencies,
+ handled_key_rd_dicts=handled_key_rd_dicts,
+ circular_repository_dependencies=circular_repository_dependencies )
def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
"""
This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
@@ -1438,6 +1446,56 @@
else:
deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
return updated_tool_dependency_names, deleted_tool_dependency_names
+def handle_key_rd_dicts_for_repository( trans, current_repository_key, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, circular_repository_dependencies ):
+ key_rd_dict = repository_key_rd_dicts.pop( 0 )
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ required_repository = get_repository_by_name_and_owner( trans, name, owner )
+ required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( required_repository.id ),
+ changeset_revision )
+ if required_repository_metadata:
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ # The required_repository_metadata changeset_revision is installable.
+ required_metadata = required_repository_metadata.metadata
+ if required_metadata:
+ for current_repository_key_rd_dict in repository_key_rd_dicts:
+ if not in_key_rd_dicts( current_repository_key_rd_dict, key_rd_dicts_to_be_processed ):
+ key_rd_dicts_to_be_processed.append( current_repository_key_rd_dict )
+ # Mark the current repository_dependency as handled.
+ if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ):
+ handled_key_rd_dicts.append( key_rd_dict )
+ # Remove the current repository from the list of repository_dependencies to be processed.
+ if in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ key_rd_dicts_to_be_processed = remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed )
+ else:
+ # The repository is in a different tool shed, so build a URL and send a request.
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
+ error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
+ return toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
+def handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ):
+ next_repository_key_rd_dict = key_rd_dicts_to_be_processed.pop( 0 )
+ next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
+ next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
+ toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ handle_key_rd_dicts_for_repository( trans,
+ next_repository_key,
+ next_repository_key_rd_dicts,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
+ all_repository_dependencies=all_repository_dependencies,
+ handled_key_rd_dicts=handled_key_rd_dicts,
+ circular_repository_dependencies=circular_repository_dependencies )
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
# Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
message = ''
@@ -1489,8 +1547,59 @@
message = str( e )
error = True
return error, message
+def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ):
+ """Return True if { repository_key :repository_dependency } is in all_repository_dependencies."""
+ for key, val in all_repository_dependencies.items():
+ if key != repository_key:
+ continue
+ if repository_dependency in val:
+ return True
+ return False
+def in_circular_repository_dependencies( repository_key_rd_dict, circular_repository_dependencies ):
+ """
+ Return True if any combination of a circular dependency tuple is the key : value pair defined in the received repository_key_rd_dict. This
+ means that each circular dependency tuple is converted into the key : value pair for comparison.
+ """
+ for tup in circular_repository_dependencies:
+ rd_0, rd_1 = tup
+ rd_0_as_key = get_repository_dependency_as_key( rd_0 )
+ rd_1_as_key = get_repository_dependency_as_key( rd_1 )
+ if rd_0_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_0_as_key ] == rd_1:
+ return True
+ if rd_1_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_1_as_key ] == rd_0:
+ return True
+ return False
+def in_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ for key_rd_dict in key_rd_dicts:
+ for key, val in key_rd_dict.items():
+ if key == k and val == v:
+ return True
+ return False
+def is_circular_repository_dependency( repository_key, repository_dependency, all_repository_dependencies ):
+ """
+ Return True if the received repository_dependency is a key in all_repository_dependencies whose list of repository dependencies
+ includes the received repository_key.
+ """
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ for key, val in all_repository_dependencies.items():
+ if key != repository_dependency_as_key:
+ continue
+ if repository_key_as_repository_dependency in val:
+ return True
+ return False
def is_downloadable( metadata_dict ):
return 'datatypes' in metadata_dict or 'repository_dependencies' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
+def initialize_all_repository_dependencies( current_repository_key, repository_dependencies_dict, all_repository_dependencies ):
+ # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
+ all_repository_dependencies[ 'root_key' ] = current_repository_key
+ all_repository_dependencies[ current_repository_key ] = []
+ # Store the value of the 'description' key only once, the first time through this recursive method.
+ description = repository_dependencies_dict.get( 'description', None )
+ all_repository_dependencies[ 'description' ] = description
+ return all_repository_dependencies
def load_tool_from_config( app, full_path ):
try:
tool = app.toolbox.load_tool( full_path )
@@ -1553,24 +1662,78 @@
"key": full_path }
folder_contents.append( node )
return folder_contents
+def populate_repository_dependency_objects_for_processing( trans, current_repository_key, repository_dependencies_dict, key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts, circular_repository_dependencies, all_repository_dependencies ):
+ current_repository_key_rd_dicts = []
+ for rd in repository_dependencies_dict[ 'repository_dependencies' ]:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts and current_repository_key:
+ # Remove all repository dependencies that point to a revision within its own repository.
+ current_repository_key_rd_dicts = remove_ropository_dependency_reference_to_self( current_repository_key_rd_dicts )
+ current_repository_key_rd_dicts = get_updated_changeset_revisions_for_repository_dependencies( trans, current_repository_key_rd_dicts )
+ for key_rd_dict in current_repository_key_rd_dicts:
+ is_circular = False
+ if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ if current_repository_key in all_repository_dependencies:
+ # Add all repository dependencies for the current repository into its entry in all_repository_dependencies.
+ all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
+ if repository_dependency not in all_repository_dependencies_val:
+ all_repository_dependencies_val.append( repository_dependency )
+ all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+ elif not in_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ # Handle circular repository dependencies.
+ if is_circular_repository_dependency( current_repository_key, repository_dependency, all_repository_dependencies ):
+ is_circular = True
+ circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies = \
+ handle_circular_repository_dependency( current_repository_key,
+ repository_dependency,
+ circular_repository_dependencies,
+ handled_key_rd_dicts,
+ all_repository_dependencies )
+ else:
+ all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
+ if not is_circular and can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = repository_dependency
+ key_rd_dicts_to_be_processed.append( new_key_rd_dict )
+ return current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies
def remove_dir( dir ):
if os.path.exists( dir ):
try:
shutil.rmtree( dir )
except:
pass
-def remove_ropository_dependency_reference_to_self( repository_dependencies, repository_key ):
+def remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ clean_key_rd_dicts = []
+ for krd_dict in key_rd_dicts:
+ key = krd_dict.keys()[ 0 ]
+ val = krd_dict[ key ]
+ if key == k and val == v:
+ continue
+ clean_key_rd_dicts.append( krd_dict )
+ return clean_key_rd_dicts
+def remove_ropository_dependency_reference_to_self( key_rd_dicts ):
"""Remove all repository dependencies that point to a revision within its own repository."""
- clean_repository_dependencies = []
- repository_tup = repository_key.split( container_util.STRSEP )
+ clean_key_rd_dicts = []
+ key = key_rd_dicts[ 0 ].keys()[ 0 ]
+ repository_tup = key.split( container_util.STRSEP )
rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_tup
- for repository_dependency in repository_dependencies:
+ for key_rd_dict in key_rd_dicts:
+ k = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ k ]
toolshed, name, owner, changeset_revision = repository_dependency
if rd_toolshed == toolshed and rd_name == name and rd_owner == owner:
log.debug( "Removing repository dependency for repository %s owned by %s since it refers to a revision within itself." % ( name, owner ) )
else:
- clean_repository_dependencies.append( repository_dependency )
- return clean_repository_dependencies
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ clean_key_rd_dicts.append( new_key_rd_dict )
+ return clean_key_rd_dicts
def remove_tool_dependency_installation_directory( dependency_install_dir ):
if os.path.exists( dependency_install_dir ):
try:
@@ -1841,6 +2004,19 @@
else:
translated_string = ''
return translated_string
+def update_circular_repository_dependencies( repository_key, repository_dependency, repository_dependencies, circular_repository_dependencies ):
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ if repository_key_as_repository_dependency in repository_dependencies:
+ found = False
+ for tup in circular_repository_dependencies:
+ if repository_dependency in tup and repository_key_as_repository_dependency in tup:
+ # The circular dependency has already been included.
+ found = True
+ if not found:
+ new_circular_tup = [ repository_dependency, repository_key_as_repository_dependency ]
+ circular_repository_dependencies.append( new_circular_tup )
+ return circular_repository_dependencies
def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
"""
Update an existing tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
@@ -1889,8 +2065,7 @@
sa_session.flush()
new_tool_dependency = tool_dependency
else:
- # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record
- # from the database.
+ # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record from the database.
log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
sa_session.delete( tool_dependency )
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1782,14 +1782,14 @@
is_malicious = repository_metadata.malicious
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -1895,14 +1895,14 @@
repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -2417,14 +2417,14 @@
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
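[Editor's note: the refactoring above centers on "key_rd_dicts" -- single-entry dictionaries mapping a name-spaced repository key to one repository dependency tuple. A minimal sketch of the shapes involved, assuming illustrative URLs and a stand-in separator (the real key builder is container_util.generate_repository_dependencies_key_for_repository and the real separator is container_util.STRSEP):

    STRSEP = '__SEP__'  # illustrative only; the real separator is container_util.STRSEP

    def as_key( rd ):
        # ( toolshed, name, owner, changeset_revision ) -> name-spaced key.
        return STRSEP.join( rd )

    repo_a = [ 'http://shed', 'repo_a', 'owner', 'rev1' ]
    repo_b = [ 'http://shed', 'repo_b', 'owner', 'rev2' ]

    # all_repository_dependencies maps each repository key to the tuples it requires.
    all_repository_dependencies = { 'root_key': as_key( repo_a ),
                                    as_key( repo_a ): [ repo_b ],
                                    as_key( repo_b ): [ repo_a ] }  # a <-> b is circular

    def is_circular( repository_key, repository_dependency, all_rds ):
        # Mirrors is_circular_repository_dependency above: the dependency's own entry
        # lists the current repository, so following it naively would loop forever.
        return repository_key.split( STRSEP ) in all_rds.get( as_key( repository_dependency ), [] )

    assert is_circular( as_key( repo_a ), repo_b, all_repository_dependencies )

Detected circular pairs are recorded in circular_repository_dependencies rather than re-queued, which is what bounds the recursion at "n" levels.]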
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Fix for value_to_display_text in DatasetToolparameter when encountering a non-set optional multiple dataset; inspired by a patch from Kyle Ellrott.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c460f284077f/
changeset: c460f284077f
user: dan
date: 2012-12-10 22:14:05
summary: Fix for value_to_display_text in DatasetToolparameter when encountering a non-set optional multiple dataset; inspired by a patch from Kyle Ellrott.
affected #: 1 file
diff -r fc84a8b469c3181be64083a91d0fc2faae5e73ef -r c460f284077f0b400901e4cd791d94a311425751 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1605,7 +1605,7 @@
return value.file_name
def value_to_display_text( self, value, app ):
- if not isinstance(value, list):
+ if value and not isinstance( value, list ):
value = [ value ]
if value:
return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: use tile region in place of tile index for simplicity.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fc84a8b469c3/
changeset: fc84a8b469c3
user: jgoecks
date: 2012-12-10 22:08:44
summary: Trackster: use tile region in place of tile index for simplicity.
affected #: 1 file
diff -r c3acc86490780d1949c4a3abf1263ace7c5ece6c -r fc84a8b469c3181be64083a91d0fc2faae5e73ef static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2762,23 +2762,27 @@
}
*/
},
+
/**
* Generate a key for the tile cache.
* TODO: create a TileCache object (like DataCache) and generate key internally.
*/
- _gen_tile_cache_key: function(w_scale, tile_index) {
- return w_scale + '_' + tile_index;
+ _gen_tile_cache_key: function(w_scale, tile_region) {
+ return w_scale + '_' + tile_region;
},
+
/**
* Request that track be drawn.
*/
request_draw: function(force, clear_after) {
this.view.request_redraw(false, force, clear_after, this);
},
+
/**
* Actions to be taken before drawing.
*/
before_draw: function() {},
+
/**
* Draw track. It is possible to force a redraw rather than use cached tiles and/or clear old
* tiles after drawing new tiles.
@@ -2826,7 +2830,8 @@
is_tile = function(o) { return (o && 'track' in o); };
// Draw tiles.
while ( ( tile_index * TILE_SIZE * resolution ) < high ) {
- var draw_result = this.draw_helper( force, tile_index, resolution, this.tiles_div, w_scale );
+ var tile_region = this._get_tile_bounds(tile_index, resolution),
+ draw_result = this.draw_helper( force, tile_region, resolution, this.tiles_div, w_scale );
if ( is_tile(draw_result) ) {
drawn_tiles.push( draw_result );
} else {
@@ -2871,10 +2876,9 @@
* Retrieves from cache, draws, or sets up drawing for a single tile. Returns either a Tile object or a
* jQuery.Deferred object that is fulfilled when tile can be drawn again.
*/
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
var track = this,
- key = this._gen_tile_cache_key(w_scale, tile_index),
- region = this._get_tile_bounds(tile_index, resolution);
+ key = this._gen_tile_cache_key(w_scale, region);
// Init kwargs if necessary to avoid having to check if kwargs defined.
if (!kwargs) { kwargs = {}; }
@@ -3022,7 +3026,7 @@
/**
* Returns a genome region that corresponds to a tile at a particular resolution
- */
+ */
_get_tile_bounds: function(tile_index, resolution) {
var tile_low = Math.floor( tile_index * TILE_SIZE * resolution ),
tile_length = Math.ceil( TILE_SIZE * resolution ),
@@ -3223,11 +3227,10 @@
this.action_icons.param_space_viz_icon.hide();
},
can_draw: Drawable.prototype.can_draw,
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
// FIXME: this function is similar to TiledTrack.draw_helper -- can the two be merged/refactored?
var track = this,
- key = this._gen_tile_cache_key(w_scale, tile_index),
- region = this._get_tile_bounds(tile_index, resolution);
+ key = this._gen_tile_cache_key(w_scale, region);
// Init kwargs if necessary to avoid having to check if kwargs defined.
if (!kwargs) { kwargs = {}; }
@@ -3446,9 +3449,9 @@
/**
* Only retrieves data and draws tile if reference data can be displayed.
*/
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
if (w_scale > this.view.canvas_manager.char_width_px) {
- return TiledTrack.prototype.draw_helper.call(this, force, tile_index, resolution, parent_element, w_scale, kwargs);
+ return TiledTrack.prototype.draw_helper.call(this, force, region, resolution, parent_element, w_scale, kwargs);
}
else {
this.hide_contents();
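[Editor's note: in Python terms -- a rough rendering of the JS above, with an illustrative TILE_SIZE -- the change keys the tile cache on the computed region rather than the raw index, so draw_helper no longer has to derive the region itself:

    import math

    TILE_SIZE = 400  # illustrative; the real constant is defined in tracks.js

    def get_tile_bounds( tile_index, resolution ):
        # Corresponds to _get_tile_bounds: map a tile index to a genome region.
        tile_low = int( math.floor( tile_index * TILE_SIZE * resolution ) )
        tile_length = int( math.ceil( TILE_SIZE * resolution ) )
        return ( tile_low, tile_low + tile_length )

    def gen_tile_cache_key( w_scale, tile_region ):
        # After this change the region, not the index, identifies the cached tile.
        return '%s_%s' % ( w_scale, tile_region )

    region = get_tile_bounds( 3, 10 )          # -> ( 12000, 16000 )
    key = gen_tile_cache_key( 0.05, region )   # -> '0.05_(12000, 16000)'
]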
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: use underscore methods to simplify tile search/iteration and remove old debugging statement.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c3acc8649078/
changeset: c3acc8649078
user: jgoecks
date: 2012-12-10 21:47:05
summary: Trackster: use underscore methods to simplify tile search/iteration and remove old debugging statement.
affected #: 1 file
diff -r 3ee0e5ee1b375c0fd580c26a93850f2fa44f93f0 -r c3acc86490780d1949c4a3abf1263ace7c5ece6c static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2854,21 +2854,17 @@
//
// If some tiles have icons, set padding of tiles without icons so features and rows align.
//
- var icons_present = false;
- for (var tile_index = 0; tile_index < tiles.length; tile_index++) {
- if (tiles[tile_index].has_icons) {
- icons_present = true;
- break;
- }
- }
+ var icons_present = _.find(tiles, function(tile) {
+ return tile.has_icons;
+ });
+
if (icons_present) {
- for (var tile_index = 0; tile_index < tiles.length; tile_index++) {
- tile = tiles[tile_index];
+ _.each(tiles, function(tile) {
if (!tile.has_icons) {
// Need to align with other tile(s) that have icons.
tile.html_elt.css("padding-top", ERROR_PADDING);
}
- }
+ });
}
},
/**
@@ -4045,7 +4041,6 @@
var painter = new (this.painter)(filtered, tile_low, tile_high, this.prefs, mode, filter_alpha_scaler, filter_height_scaler, ref_seq);
var feature_mapper = null;
- // console.log(( tile_low - this.view.low ) * w_scale, tile_index, w_scale);
ctx.fillStyle = this.prefs.block_color;
ctx.font = ctx.canvas.manager.default_font;
ctx.textAlign = "right";
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: remove unused var.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3ee0e5ee1b37/
changeset: 3ee0e5ee1b37
user: jgoecks
date: 2012-12-10 21:38:57
summary: Trackster: remove unused var.
affected #: 1 file
diff -r 2fdc0270baad3b020118d05cc3429993a3d80207 -r 3ee0e5ee1b375c0fd580c26a93850f2fa44f93f0 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -3278,7 +3278,6 @@
this.tile_predraw_init();
var canvas = track.view.canvas_manager.new_canvas(),
- tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = region.get('start'),
tile_high = region.get('end'),
all_data_index = 0,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: do not use width parameter in draw helper because it's not needed anymore.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2fdc0270baad/
changeset: 2fdc0270baad
user: jgoecks
date: 2012-12-10 21:31:38
summary: Trackster: do not use width parameter in draw helper because it's not needed anymore.
affected #: 1 file
diff -r 512c7561e1b95b381345296a0da3aa3effc05803 -r 2fdc0270baad3b020118d05cc3429993a3d80207 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2766,8 +2766,8 @@
* Generate a key for the tile cache.
* TODO: create a TileCache object (like DataCache) and generate key internally.
*/
- _gen_tile_cache_key: function(width, w_scale, tile_index) {
- return width + '_' + w_scale + '_' + tile_index;
+ _gen_tile_cache_key: function(w_scale, tile_index) {
+ return w_scale + '_' + tile_index;
},
/**
* Request that track be drawn.
@@ -2826,7 +2826,7 @@
is_tile = function(o) { return (o && 'track' in o); };
// Draw tiles.
while ( ( tile_index * TILE_SIZE * resolution ) < high ) {
- var draw_result = this.draw_helper( force, width, tile_index, resolution, this.tiles_div, w_scale );
+ var draw_result = this.draw_helper( force, tile_index, resolution, this.tiles_div, w_scale );
if ( is_tile(draw_result) ) {
drawn_tiles.push( draw_result );
} else {
@@ -2875,9 +2875,9 @@
* Retrieves from cache, draws, or sets up drawing for a single tile. Returns either a Tile object or a
* jQuery.Deferred object that is fulfilled when tile can be drawn again.
*/
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
var track = this,
- key = this._gen_tile_cache_key(width, w_scale, tile_index),
+ key = this._gen_tile_cache_key(w_scale, tile_index),
region = this._get_tile_bounds(tile_index, resolution);
// Init kwargs if necessary to avoid having to check if kwargs defined.
@@ -3227,10 +3227,10 @@
this.action_icons.param_space_viz_icon.hide();
},
can_draw: Drawable.prototype.can_draw,
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
// FIXME: this function is similar to TiledTrack.draw_helper -- can the two be merged/refactored?
var track = this,
- key = this._gen_tile_cache_key(width, w_scale, tile_index),
+ key = this._gen_tile_cache_key(w_scale, tile_index),
region = this._get_tile_bounds(tile_index, resolution);
// Init kwargs if necessary to avoid having to check if kwargs defined.
@@ -3413,7 +3413,7 @@
for (var i = 0; i < tiles.length; i++) {
var tile = tiles[i];
if (tile.html_elt.find("canvas").height() !== max_height) {
- this.draw_helper(true, width, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { height: max_height } );
+ this.draw_helper(true, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { height: max_height } );
tile.html_elt.remove();
}
}
@@ -3451,9 +3451,9 @@
/**
* Only retrieves data and draws tile if reference data can be displayed.
*/
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
if (w_scale > this.view.canvas_manager.char_width_px) {
- return TiledTrack.prototype.draw_helper.call(this, force, width, tile_index, resolution, parent_element, w_scale, kwargs);
+ return TiledTrack.prototype.draw_helper.call(this, force, tile_index, resolution, parent_element, w_scale, kwargs);
}
else {
this.hide_contents();
@@ -3815,7 +3815,7 @@
var tile = tiles[i];
if (tile.max_val !== global_max) {
tile.html_elt.remove();
- track.draw_helper(true, width, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { more_tile_data: { max: global_max } } );
+ track.draw_helper(true, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { more_tile_data: { max: global_max } } );
}
}
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: inithello: More tool shed functional test enhancements.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/512c7561e1b9/
changeset: 512c7561e1b9
user: inithello
date: 2012-12-10 20:41:14
summary: More tool shed functional test enhancements.
affected #: 8 files
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -15,6 +15,7 @@
self.hgweb_config_dir = os.environ.get( 'TEST_HG_WEB_CONFIG_DIR' )
self.hgweb_config_manager = galaxy.webapps.community.util.hgweb_config.HgWebConfigManager()
self.hgweb_config_manager.hgweb_config_dir = self.hgweb_config_dir
+ self.tool_shed_test_tmp_dir = os.environ.get( 'TOOL_SHED_TEST_TMP_DIR', None )
self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
@@ -213,6 +214,9 @@
string = string.replace( character, replacement )
return string
def generate_repository_dependency_xml( self, repository, xml_filename, dependency_description='' ):
+ file_path = os.path.split( xml_filename )[0]
+ if not os.path.exists( file_path ):
+ os.makedirs( file_path )
changeset_revision = self.get_repository_tip( repository )
if dependency_description:
description = ' description="%s"' % dependency_description
@@ -226,6 +230,13 @@
description=description )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
+ def generate_temp_path( self, test_script_path, additional_paths=[] ):
+ return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ def get_filename( self, filename, filepath=None ):
+ if filepath is not None:
+ return os.path.abspath( os.path.join( filepath, filename ) )
+ else:
+ return os.path.abspath( os.path.join( self.file_dir, filename ) )
def get_latest_repository_metadata_for_repository( self, repository ):
# TODO: This will not work as expected. Fix it.
return repository.metadata_revisions[ 0 ]
@@ -335,6 +346,7 @@
def upload_file( self,
repository,
filename,
+ filepath=None,
valid_tools_only=True,
strings_displayed=[],
strings_not_displayed=[],
@@ -344,6 +356,6 @@
strings_displayed.append( "has been successfully uploaded to the repository." )
for key in kwd:
tc.fv( "1", key, kwd[ key ] )
- tc.formfile( "1", "file_data", self.get_filename( filename ) )
+ tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-repository_name = 'filtering'
+repository_name = 'filtering_0000'
repository_description = "Galaxy's filtering tool"
repository_long_description = "Long description of Galaxy's filtering tool"
@@ -19,41 +19,48 @@
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
def test_0005_create_categories( self ):
- """Create categories"""
- self.create_category( 'Text Manipulation', 'Tools for manipulating text' )
- self.create_category( 'Text Analysis', 'Tools for analyzing text' )
+ """Create categories for this test suite"""
+ self.create_category( 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 1' )
+ self.create_category( 'Test 0000 Basic Repository Features 2', 'Test 0000 Basic Repository Features 2' )
def test_0010_create_repository( self ):
"""Create the filtering repository"""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
'Repository %s has been created' % "'%s'" % repository_name ]
self.create_repository( repository_name,
repository_description,
repository_long_description=repository_long_description,
- categories=[ 'Text Manipulation' ],
+ categories=[ 'Test 0000 Basic Repository Features 1' ],
strings_displayed=strings_displayed )
def test_0015_edit_repository( self ):
"""Edit the repository name, description, and long description"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
new_name = "renamed_filtering"
new_description = "Edited filtering tool"
new_long_description = "Edited long description"
self.edit_repository_information( repository, repo_name=new_name, description=new_description, long_description=new_long_description )
def test_0020_change_repository_category( self ):
"""Change the categories associated with the filtering repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
- self.edit_repository_categories( repository, categories_to_add=[ "Text Analysis" ], categories_to_remove=[ "Text Manipulation" ] )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.edit_repository_categories( repository,
+ categories_to_add=[ "Test 0000 Basic Repository Features 2" ],
+ categories_to_remove=[ "Test 0000 Basic Repository Features 1" ] )
def test_0025_grant_write_access( self ):
'''Grant write access to another user'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
- self.grant_write_access( repository, usernames=[ common.test_user_1_name ] )
- self.revoke_write_access( repository, common.test_user_1_name )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.grant_write_access( repository, usernames=[ common.admin_username ] )
+ self.revoke_write_access( repository, common.admin_username )
def test_0030_upload_filtering_1_1_0( self ):
"""Upload filtering_1.1.0.tar to the repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_1.1.0.tar', commit_message="Uploaded filtering 1.1.0" )
def test_0035_verify_repository( self ):
'''Display basic repository pages'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
latest_changeset_revision = self.get_repository_tip( repository )
self.check_for_valid_tools( repository, strings_displayed=[ 'Filter1' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=1 )
@@ -61,16 +68,20 @@
self.check_repository_tools_for_changeset_revision( repository, tip )
self.check_repository_metadata( repository, tip_only=False )
self.browse_repository( repository, strings_displayed=[ 'Browse %s revision' % repository.name, '(repository tip)' ] )
- self.display_repository_clone_page( common.admin_username,
+ self.display_repository_clone_page( common.test_user_1_name,
repository_name,
strings_displayed=[ 'Uploaded filtering 1.1.0', latest_changeset_revision ] )
def test_0040_alter_repository_states( self ):
'''Test toggling the malicious and deprecated repository flags.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
self.set_repository_malicious( repository, set_malicious=True, strings_displayed=[ 'The repository tip has been defined as malicious.' ] )
self.set_repository_malicious( repository,
set_malicious=False,
strings_displayed=[ 'The repository tip has been defined as <b>not</b> malicious.' ] )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
self.set_repository_deprecated( repository,
strings_displayed=[ 'has been marked as deprecated', 'Mark as not deprecated' ] )
self.display_manage_repository_page( repository,
@@ -82,7 +93,7 @@
set_deprecated=False )
def test_0045_display_repository_tip_file( self ):
'''Display the contents of filtering.xml in the repository tip revision'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_repository_file_contents( repository=repository,
filename='filtering.xml',
filepath=None,
@@ -90,16 +101,16 @@
strings_not_displayed=[] )
def test_0050_upload_filtering_txt_file( self ):
'''Upload filtering.txt file associated with tool version 1.1.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
- 'filtering/filtering.txt',
+ 'filtering/filtering_0000.txt',
commit_message="Uploaded filtering.txt",
uncompress_file='No',
remove_repo_files_not_in_tar='No' )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0055_upload_filtering_test_data( self ):
'''Upload filtering test data.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_test_data.tar', commit_message="Uploaded filtering test data", remove_repo_files_not_in_tar='No' )
self.display_repository_file_contents( repository=repository,
filename='1.bed',
@@ -109,14 +120,14 @@
self.check_repository_metadata( repository, tip_only=True )
def test_0060_upload_filtering_2_2_0( self ):
'''Upload filtering version 2.2.0'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'filtering/filtering_2.2.0.tar',
commit_message="Uploaded filtering 2.2.0",
remove_repo_files_not_in_tar='No' )
def test_0065_verify_filtering_repository( self ):
'''Verify the new tool versions and repository metadata.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip = self.get_repository_tip( repository )
self.check_for_valid_tools( repository )
strings_displayed = self.get_repository_metadata_revisions( repository ) + [ 'Select a revision' ]
@@ -126,7 +137,7 @@
self.check_repository_metadata( repository, tip_only=False )
def test_0070_upload_readme_txt_file( self ):
'''Upload readme.txt file associated with tool version 2.2.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'readme.txt', commit_message="Uploaded readme.txt" )
self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] )
# Verify that there is a different readme file for each metadata revision.
@@ -134,13 +145,13 @@
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0', 'This is a readme file.' ] )
def test_0075_delete_readme_txt_file( self ):
'''Delete the readme.txt file.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0080_search_for_valid_filter_tool( self ):
'''Search for the filtering tool by tool ID, name, and version.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip_changeset = self.get_repository_tip( repository )
search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-repository_name = 'freebayes'
+repository_name = 'freebayes_0010'
repository_description = "Galaxy's freebayes tool"
repository_long_description = "Long description of Galaxy's freebayes tool"
@@ -10,21 +10,28 @@
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
def test_0005_create_category( self ):
- """Create SNP Analysis category."""
- self.create_category( 'SNP Analysis', 'Tools for single nucleotide polymorphism data such as WGA' )
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0010 Repository With Tool Dependencies', 'Tests for a repository with tool dependencies.' )
def test_0010_create_freebayes_repository_and_upload_tool_xml( self ):
'''Create freebayes repository and upload freebayes.xml without tool_data_table_conf.xml.sample. This should result in an error message and invalid tool.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
self.create_repository( repository_name,
repository_description,
repository_long_description=repository_long_description,
- categories=[ 'SNP Analysis' ],
+ categories=[ 'Test 0010 Repository With Tool Dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/freebayes.xml',
valid_tools_only=False,
@@ -37,7 +44,7 @@
strings_displayed=[ 'requires an entry', 'tool_data_table_conf.xml' ] )
def test_0015_upload_missing_tool_data_table_conf_file( self ):
'''Upload the missing tool_data_table_conf.xml.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/tool_data_table_conf.xml.sample',
valid_tools_only=False,
@@ -50,27 +57,27 @@
strings_displayed=[ 'refers to a file', 'sam_fa_indices.loc' ] )
def test_0020_upload_missing_sample_loc_file( self ):
'''Upload the missing sam_fa_indices.loc.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/sam_fa_indices.loc.sample',
strings_displayed=[],
commit_message='Uploaded tool data table .loc file.' )
def test_0025_upload_invalid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.5 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
commit_message='Uploaded invalid tool dependency XML.' )
def test_0030_upload_valid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'tool_dependencies.xml' ),
commit_message='Uploaded valid tool dependency XML.' )
def test_0035_verify_tool_dependencies( self ):
'''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository,
strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
strings_not_displayed=[ 'Invalid tools' ] )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-datatypes_repository_name = 'emboss_datatypes'
+datatypes_repository_name = 'emboss_datatypes_0020'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
@@ -14,20 +14,18 @@
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
- self.logout()
- self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
def test_0005_create_category( self ):
- """Create Sequence Analysis category"""
- self.logout()
- self.login( email=common.admin_email, username=common.admin_username )
- self.create_category( 'Sequence Analysis', 'Tools for performing Protein and DNA/RNA analysis' )
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0020 Basic Repository Dependencies', 'Testing basic repository dependency features.' )
def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
'''Create and populate the emboss_datatypes repository.'''
self.logout()
@@ -35,7 +33,7 @@
self.create_repository( datatypes_repository_name,
datatypes_repository_description,
repository_long_description=datatypes_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
@@ -48,7 +46,7 @@
self.create_repository( emboss_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Text Manipulation' ],
+ categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
@@ -56,8 +54,13 @@
'''Generate and upload the repository_dependencies.xml file'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
- self.generate_repository_dependency_xml( datatypes_repository, self.get_filename( 'emboss/5/repository_dependencies.xml' ) )
- self.upload_file( repository, 'emboss/5/repository_dependencies.xml', commit_message='Uploaded repository_dependencies.xml' )
+ repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( datatypes_repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
def test_0030_verify_emboss_5_repository_dependency_on_emboss_datatypes( self ):
'''Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
@@ -65,7 +68,3 @@
changeset_revision = self.get_repository_tip( datatypes_repository )
strings_displayed = [ datatypes_repository_name, common.test_user_1_name, changeset_revision, 'Repository dependencies' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
- def test_0035_cleanup( self ):
- '''Clean up generated test data.'''
- if os.path.exists( self.get_filename( 'emboss/5/repository_dependencies.xml' ) ):
- os.remove( self.get_filename( 'emboss/5/repository_dependencies.xml' ) )
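The change above replaces repository dependency XML files generated inside the checked-out test data tree (plus a dedicated test_0035_cleanup method) with files written under a per-suite temporary path that the test framework can sweep in one pass. As a rough illustration only, a generate_temp_path helper along these lines would suffice; this is a hypothetical reconstruction, not the actual twilltestcase code, and it assumes the TOOL_SHED_TEST_TMP_DIR variable exported in functional_tests.py later in this changeset:

import os

def generate_temp_path( test_script_path, additional_paths=None ):
    # Hypothetical sketch: nest the requested path segments under the
    # suite-wide temporary root so one rmtree cleans up every test's files.
    temp_path = os.path.join( os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ],
                              test_script_path,
                              *( additional_paths or [] ) )
    if not os.path.exists( temp_path ):
        os.makedirs( temp_path )
    return temp_path

Centralizing generated files this way is what lets the per-test cleanup methods here and in test_0030 below be deleted outright.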
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -5,7 +5,7 @@
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
-emboss_repository_name = 'emboss'
+emboss_repository_name = 'emboss_0030'
emboss_5_repository_name = 'emboss_5_0030'
emboss_6_repository_name = 'emboss_6_0030'
emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
@@ -13,38 +13,31 @@
class TestRepositoryDependencyRevisions( ShedTwillTestCase ):
'''Test dependencies on different revisions of a repository.'''
- '''
- create repository emboss_5_0030
- create repository emboss_6_0030
- create repository emboss_datatypes if necessary
- create repository emboss
- emboss_5 has repository_dependency.xml file that defines emboss_datatypes
- emboss_6 has repository_dependency.xml file that defines emboss_datatypes
- get information to create repository dependency imformation for emboss
- emboss depends on emboss_5
- then emboss depends on emboss_6
- verify per-changeset dependencies
- '''
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
+ def test_0005_create_repositories( self ):
+ '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % regular_email
- test_user_1_private_role = get_private_role( test_user_1 )
- def test_0005_create_repositories( self ):
- '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
if emboss_5_repository is None:
self.create_repository( emboss_5_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
@@ -53,7 +46,7 @@
self.create_repository( emboss_6_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
self.upload_file( emboss_6_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
@@ -62,7 +55,7 @@
self.create_repository( datatypes_repository_name,
datatypes_repository_description,
repository_long_description=datatypes_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
if self.repository_is_new( datatypes_repository ):
@@ -72,45 +65,56 @@
self.create_repository( emboss_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
def test_0010_generate_repository_dependencies_for_emboss_5( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
- self.generate_repository_dependency_xml( datatypes_repository, self.get_filename( 'emboss/repository_dependencies.xml' ) )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
+ self.generate_repository_dependency_xml( datatypes_repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
- self.upload_file( emboss_5_repository, 'emboss/repository_dependencies.xml', commit_message='Uploaded repository_depepndencies.xml.' )
+ self.upload_file( emboss_5_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml.' )
def test_0015_generate_repository_dependencies_for_emboss_6( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
- self.upload_file( emboss_6_repository, 'emboss/repository_dependencies.xml', commit_message='Uploaded repository_depepndencies.xml.' )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
+ self.upload_file( emboss_6_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml.' )
def test_0020_generate_repository_dependency_on_emboss_5( self ):
'''Create and upload repository_dependencies.xml for the emboss_5_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
self.generate_repository_dependency_xml( emboss_5_repository,
- self.get_filename( 'emboss/5/repository_dependencies.xml' ),
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 5 repository.' )
self.upload_file( emboss_repository,
- 'emboss/5/repository_dependencies.xml',
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_5' )
def test_0025_generate_repository_dependency_on_emboss_6( self ):
'''Create and upload repository_dependencies.xml for the emboss_6_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
self.generate_repository_dependency_xml( emboss_6_repository,
- self.get_filename( 'emboss/6/repository_dependencies.xml' ),
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 6 repository.' )
self.upload_file( emboss_repository,
- 'emboss/6/repository_dependencies.xml',
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_6' )
def test_0030_verify_repository_dependency_revisions( self ):
'''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- # Reset emboss metadata to pick up the repository dependency changes.
-# self.reset_repository_metadata( repository )
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
datatypes_tip = self.get_repository_tip( datatypes_repository )
@@ -127,8 +131,3 @@
self.display_manage_repository_page( repository,
changeset_revision=changeset_revision,
strings_displayed=[ str( metadata ) for metadata in repository_dependency_metadata ] )
- def test_0035_cleanup( self ):
- '''Clean up generated repository dependency XML files.'''
- for filename in [ 'emboss/5/repository_dependencies.xml', 'emboss/6/repository_dependencies.xml', 'emboss/repository_dependencies.xml' ]:
- if os.path.exists( self.get_filename( filename ) ):
- os.remove( self.get_filename( filename ) )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -10,6 +10,7 @@
# the hgweb.config file, the database, new repositories, etc. Since the tool shed browses repository contents via HTTP,
the full path to the temporary directory where the repositories are located cannot contain invalid URL characters.
tool_shed_test_tmp_dir = os.path.join( tool_shed_home_directory, 'tmp' )
+os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ] = tool_shed_test_tmp_dir
new_path = [ os.path.join( cwd, "lib" ) ]
new_path.extend( sys.path[1:] )
sys.path = new_path
@@ -270,7 +271,7 @@
for dir in [ tool_shed_test_tmp_dir ]:
if os.path.exists( dir ):
log.info( "Cleaning up temporary files in %s" % dir )
- #shutil.rmtree( dir )
+ shutil.rmtree( dir )
except:
pass
if success:
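These two edits to functional_tests.py work as a pair: the temporary directory is exported through the environment so individual test cases (for example, the generate_temp_path calls above) can locate it, and the previously commented-out rmtree is re-enabled so the whole tree is deleted when the run ends. A minimal self-contained sketch of the pattern, with illustrative names:

import os, shutil, tempfile

# Create a suite-wide scratch directory and publish it to the test cases.
tool_shed_test_tmp_dir = tempfile.mkdtemp( prefix='tool_shed_test_' )
os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ] = tool_shed_test_tmp_dir
try:
    pass  # run the functional test suite here
finally:
    # Remove everything the tests generated in one pass.
    if os.path.exists( tool_shed_test_tmp_dir ):
        shutil.rmtree( tool_shed_test_tmp_dir, ignore_errors=True )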
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/test_data/filtering/filtering.txt
--- a/test/tool_shed/test_data/filtering/filtering.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Readme file for filtering 1.1.0
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/test_data/filtering/filtering_0000.txt
--- /dev/null
+++ b/test/tool_shed/test_data/filtering/filtering_0000.txt
@@ -0,0 +1,1 @@
+Readme file for filtering 1.1.0
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for SelectToolParameter rerun, workflow when multiple="true", submitted by Jim Johnson.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bd020b5aa531/
changeset: bd020b5aa531
user: dan
date: 2012-12-10 18:48:58
summary: Fix for SelectToolParameter rerun, workflow when multiple="true", submitted by Jim Johnson.
affected #: 1 file
diff -r 0042b30216fc5dc4f92b314c93a3aa9a0d810d77 -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -714,7 +714,12 @@
else:
if value not in legal_values:
raise ValueError( "An invalid option was selected, please verify" )
- return value
+ return value
+ def to_html_value( self, value, app ):
+ if isinstance( value, list ):
+ return value
+ else:
+ return str( value )
def to_param_dict_string( self, value, other_values={} ):
if value is None:
return "None"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.