galaxy-commits
September 2013
commit/galaxy-central: natefoo: Don't check old_style paths in the DiskObjectStore by default.
by commits-noreply@bitbucket.org 20 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d4c6a250afd0/
Changeset: d4c6a250afd0
User: natefoo
Date: 2013-09-20 17:10:31
Summary: Don't check old_style paths in the DiskObjectStore by default.
Affected #: 3 files
diff -r ab20415126a768f456314cfe587e4ce71fd2049b -r d4c6a250afd03a942e70f04ee7da8335dfea476b lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -209,6 +209,7 @@
if self.nginx_upload_store:
self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
self.object_store = kwargs.get( 'object_store', 'disk' )
+ self.object_store_check_old_style = string_as_bool( kwargs.get( 'object_store_check_old_style', False ) )
self.object_store_cache_path = resolve_path( kwargs.get( "object_store_cache_path", "database/object_store_cache" ), self.root )
# Handle AWS-specific config options for backward compatibility
if kwargs.get( 'aws_access_key', None) is not None:
diff -r ab20415126a768f456314cfe587e4ce71fd2049b -r d4c6a250afd03a942e70f04ee7da8335dfea476b lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -198,6 +198,7 @@
super(DiskObjectStore, self).__init__(config, file_path=file_path, extra_dirs=extra_dirs)
self.file_path = file_path or config.file_path
self.config = config
+ self.check_old_style = config.object_store_check_old_style
self.extra_dirs['job_work'] = config.job_working_directory
self.extra_dirs['temp'] = config.new_file_path
if extra_dirs is not None:
@@ -264,14 +265,13 @@
return os.path.abspath(path)
def exists(self, obj, **kwargs):
- path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
- # and check hashed path
- if os.path.exists(path):
- return True
- else:
- path = self._construct_path(obj, **kwargs)
- return os.path.exists(path)
+ if self.check_old_style:
+ path = self._construct_path(obj, old_style=True, **kwargs)
+ # For backward compatibility, check root path first; otherwise, construct
+ # and check hashed path
+ if os.path.exists(path):
+ return True
+ return os.path.exists(self._construct_path(obj, **kwargs))
def create(self, obj, **kwargs):
if not self.exists(obj, **kwargs):
@@ -320,13 +320,13 @@
return content
def get_filename(self, obj, **kwargs):
- path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
- # and return hashed path
- if os.path.exists(path):
- return path
- else:
- return self._construct_path(obj, **kwargs)
+ if self.check_old_style:
+ path = self._construct_path(obj, old_style=True, **kwargs)
+ # For backward compatibility, check root path first; otherwise, construct
+ # and return hashed path
+ if os.path.exists(path):
+ return path
+ return self._construct_path(obj, **kwargs)
def update_from_file(self, obj, file_name=None, create=False, **kwargs):
""" `create` parameter is not used in this implementation """
diff -r ab20415126a768f456314cfe587e4ce71fd2049b -r d4c6a250afd03a942e70f04ee7da8335dfea476b universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -209,6 +209,12 @@
# distributed, hierarchical)
#object_store = disk
+# *Extremely* old Galaxy instances created datasets at the root of the
+# `file_path` defined above. If your Galaxy instance has datasets at the root
+# (instead of in directories composed by hashing the dataset id), you should
+# enable this option to allow Galaxy to find them.
+#object_store_check_old_style = False
+
# Credentials used by certain (s3, swift) object store backends
#os_access_key = <your cloud object store access key>
#os_secret_key = <your cloud object store secret key>
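For readers skimming the diff above: with object_store_check_old_style disabled (the new default), the DiskObjectStore goes straight to the hashed path; enabling the option restores the extra check for datasets stored flat at the root of file_path. Below is a minimal standalone sketch of that lookup order, not Galaxy's actual class; the dataset_%d.dat naming and the directory-hash helper are illustrative stand-ins for Galaxy's real path construction.

    import os

    def resolve_dataset_path(file_path, dataset_id, check_old_style=False):
        """Return the on-disk location of a dataset file.

        With check_old_style=True, the legacy flat layout (files directly
        under file_path) is checked before falling back to the hashed layout.
        """
        flat_path = os.path.join(file_path, "dataset_%d.dat" % dataset_id)
        # Illustrative directory hashing only; Galaxy derives the directory differently.
        hashed_dir = "%03d" % (dataset_id // 1000)
        hashed_path = os.path.join(file_path, hashed_dir, "dataset_%d.dat" % dataset_id)
        if check_old_style and os.path.exists(flat_path):
            return flat_path
        return hashed_path

    print(resolve_dataset_path("database/files", 42))                        # hashed layout
    print(resolve_dataset_path("database/files", 42, check_old_style=True))  # flat path if it exists

Skipping the old-style check saves one stat() call per dataset lookup on instances that never used the flat layout, which is why it is now off by default.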
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Additional framework support for tool dependencies that define binary installation recipes.
by commits-noreply@bitbucket.org 20 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ab20415126a7/
Changeset: ab20415126a7
User: greg
Date: 2013-09-20 17:05:30
Summary: Additional framework support for tool dependencies that define binary installation recipes. This changeset also includes several fixes, among them fixes for discovering and displaying installed and missing tool dependencies when installing a repository, and a fix for the remaining issue that resulted in the creation of a so-called "white ghost" when reinstalling a repository that defines certain dependencies.
Affected #: 15 files
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -25,7 +25,7 @@
from galaxy.datatypes.metadata import MetadataCollection
from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
from galaxy.security import get_permitted_actions
-from galaxy.util import is_multi_byte, nice_size, Params, restore_text, send_mail
+from galaxy.util import asbool, is_multi_byte, nice_size, Params, restore_text, send_mail
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import new_secure_hash
from galaxy.web.framework.helpers import to_unicode
@@ -34,6 +34,7 @@
WorkflowMappingField)
from sqlalchemy.orm import object_session
from sqlalchemy.sql.expression import func
+from tool_shed.util import common_util
log = logging.getLogger( __name__ )
@@ -3463,7 +3464,28 @@
@property
def has_repository_dependencies( self ):
if self.metadata:
- return 'repository_dependencies' in self.metadata
+ repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ return True
+ return False
+
+ @property
+ def has_repository_dependencies_only_if_compiling_contained_td( self ):
+ if self.metadata:
+ repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ return False
+ return True
return False
@property
@@ -3695,10 +3717,14 @@
@property
def tuples_of_repository_dependencies_needed_for_compiling_td( self ):
- """Return this repository's repository dependencies that are necessary only for compiling this repository's tool dependencies."""
+ """
+ Return tuples defining this repository's repository dependencies that are necessary only for compiling this repository's tool
+ dependencies.
+ """
rd_tups_of_repositories_needed_for_compiling_td = []
- if self.has_repository_dependencies:
- rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ if self.metadata:
+ repository_dependencies = self.metadata.get( 'repository_dependencies', None )
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
for rd_tup in rd_tups:
if len( rd_tup ) == 6:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
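The two new RepositoryMetadata properties above, and suc.get_repository_dependency_types() introduced later in this changeset, all hinge on the sixth element of each repository-dependency tuple, only_if_compiling_contained_td. A minimal sketch of the "any" form used by get_repository_dependency_types, with a local asbool standing in for galaxy.util.asbool:

    def asbool(value):
        # Stand-in for galaxy.util.asbool.
        return str(value).lower() in ("true", "yes", "on", "1", "t", "y")

    def repository_dependency_types(rd_tuples):
        """Return (has_repository_dependencies,
        has_repository_dependencies_only_if_compiling_contained_td)."""
        has_rd = any(not asbool(t[5]) for t in rd_tuples)
        has_rd_for_compiling_only = any(asbool(t[5]) for t in rd_tuples)
        return has_rd, has_rd_for_compiling_only

    # The example tuple from the diff's comments:
    rd_tuples = [["http://localhost:9009", "package_libgtextutils_0_6", "test",
                  "e2003cbf18cd", "True", "True"]]
    print(repository_dependency_types(rd_tuples))  # -> (False, True)

A dependency flagged only_if_compiling_contained_td is treated as a dependency of the contained tool dependency rather than of the repository itself, which is why it no longer counts toward has_repository_dependencies.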
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -812,7 +812,7 @@
status = kwd.get( 'status', 'done' )
shed_tool_conf = kwd.get( 'shed_tool_conf', None )
tool_shed_url = kwd[ 'tool_shed_url' ]
- # Handle repository dependencies.
+ # Handle repository dependencies, which do not include those that are required only for compiling a dependent repository's tool dependencies.
has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
# Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
@@ -1061,7 +1061,7 @@
repository_clone_url,
metadata,
trans.model.ToolShedRepository.installation_status.NEW,
- tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.changeset_revision,
tool_shed_repository.owner,
tool_shed_repository.dist_to_shed )
ctx_rev = suc.get_ctx_rev( trans.app,
@@ -1320,7 +1320,6 @@
missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
repository_name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
-
if installed_repository_dependencies or missing_repository_dependencies:
has_repository_dependencies = True
else:
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -73,6 +73,7 @@
"changeset_revision": "3a08cc21466f",
"downloadable": true,
"has_repository_dependencies": false,
+ "has_repository_dependencies_only_if_compiling_contained_td": false,
"id": "f9cad7b01a472135",
"includes_datatypes": false,
"includes_tool_dependencies": false,
@@ -125,7 +126,8 @@
action='show',
id=encoded_repository_metadata_id )
# Get the repo_info_dict for installing the repository.
- repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies = \
+ repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
return repository_dict, repository_metadata_dict, repo_info_dict
else:
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1343,32 +1343,36 @@
def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
def has_galaxy_utilities( repository_metadata ):
- includes_data_managers = False
- includes_datatypes = False
- includes_tools = False
- includes_tools_for_display_in_tool_panel = False
- has_repository_dependencies = False
- includes_tool_dependencies = False
- includes_workflows = False
+ has_galaxy_utilities_dict = dict( includes_data_managers=False,
+ includes_datatypes=False,
+ includes_tools=False,
+ includes_tools_for_display_in_tool_panel=False,
+ has_repository_dependencies=False,
+ has_repository_dependencies_only_if_compiling_contained_td=False,
+ includes_tool_dependencies=False,
+ includes_workflows=False )
if repository_metadata:
includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
metadata = repository_metadata.metadata
if metadata:
if 'data_manager' in metadata:
- includes_data_managers = True
+ has_galaxy_utilities_dict[ 'includes_data_managers' ] = True
if 'datatypes' in metadata:
- includes_datatypes = True
+ has_galaxy_utilities_dict[ 'includes_datatypes' ] = True
if 'tools' in metadata:
- includes_tools = True
+ has_galaxy_utilities_dict[ 'includes_tools' ] = True
if 'tool_dependencies' in metadata:
- includes_tool_dependencies = True
- if 'repository_dependencies' in metadata:
- has_repository_dependencies = True
+ has_galaxy_utilities_dict[ 'includes_tool_dependencies' ] = True
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( repository_dependencies )
+ has_galaxy_utilities_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+ has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = \
+ has_repository_dependencies_only_if_compiling_contained_td
if 'workflows' in metadata:
- includes_workflows = True
- return includes_data_managers, includes_datatypes, includes_tools, includes_tools_for_display_in_tool_panel, includes_tool_dependencies, has_repository_dependencies, includes_workflows
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
+ has_galaxy_utilities_dict[ 'includes_workflows' ] = True
+ return has_galaxy_utilities_dict
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
changeset_revision = kwd.get( 'changeset_revision', None )
@@ -1376,8 +1380,15 @@
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
trans.security.encode_id( repository.id ),
changeset_revision )
- includes_data_managers, includes_datatypes, includes_tools, includes_tools_for_display_in_tool_panel, includes_tool_dependencies, has_repository_dependencies, includes_workflows = \
- has_galaxy_utilities( repository_metadata )
+ has_galaxy_utilities_dict = has_galaxy_utilities( repository_metadata )
+ includes_data_managers = has_galaxy_utilities_dict[ 'includes_data_managers' ]
+ includes_datatypes = has_galaxy_utilities_dict[ 'includes_datatypes' ]
+ includes_tools = has_galaxy_utilities_dict[ 'includes_tools' ]
+ includes_tools_for_display_in_tool_panel = has_galaxy_utilities_dict[ 'includes_tools_for_display_in_tool_panel' ]
+ includes_tool_dependencies = has_galaxy_utilities_dict[ 'includes_tool_dependencies' ]
+ has_repository_dependencies = has_galaxy_utilities_dict[ 'has_repository_dependencies' ]
+ has_repository_dependencies_only_if_compiling_contained_td = has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ]
+ includes_workflows = has_galaxy_utilities_dict[ 'includes_workflows' ]
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
# Default to the received changeset revision and ctx_rev.
@@ -1392,6 +1403,7 @@
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
includes_tool_dependencies=includes_tool_dependencies,
has_repository_dependencies=has_repository_dependencies,
+ has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
includes_workflows=includes_workflows )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
@@ -1407,6 +1419,7 @@
for changeset in repo.changelog:
includes_tools = False
has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
changeset_hash = str( repo.changectx( changeset ) )
ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
if update_to_changeset_hash:
@@ -1414,8 +1427,15 @@
trans.security.encode_id( repository.id ),
changeset_hash )
if update_to_repository_metadata:
- includes_data_managers, includes_datatypes, includes_tools, includes_tools_for_display_in_tool_panel, includes_tool_dependencies, has_repository_dependencies, includes_workflows = \
- has_galaxy_utilities( update_to_repository_metadata )
+ has_galaxy_utilities_dict = has_galaxy_utilities( repository_metadata )
+ includes_data_managers = has_galaxy_utilities_dict[ 'includes_data_managers' ]
+ includes_datatypes = has_galaxy_utilities_dict[ 'includes_datatypes' ]
+ includes_tools = has_galaxy_utilities_dict[ 'includes_tools' ]
+ includes_tools_for_display_in_tool_panel = has_galaxy_utilities_dict[ 'includes_tools_for_display_in_tool_panel' ]
+ includes_tool_dependencies = has_galaxy_utilities_dict[ 'includes_tool_dependencies' ]
+ has_repository_dependencies = has_galaxy_utilities_dict[ 'has_repository_dependencies' ]
+ has_repository_dependencies_only_if_compiling_contained_td = has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ]
+ includes_workflows = has_galaxy_utilities_dict[ 'includes_workflows' ]
# We found a RepositoryMetadata record.
if changeset_hash == repository.tip( trans.app ):
# The current ctx is the repository tip, so use it.
@@ -1435,6 +1455,7 @@
update_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
update_dict[ 'includes_workflows' ] = includes_workflows
update_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+ update_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = has_repository_dependencies_only_if_compiling_contained_td
update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
return encoding_util.tool_shed_encode( update_dict )
@@ -1611,14 +1632,18 @@
includes_tools = False
includes_tools_for_display_in_tool_panel = False
has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
includes_tool_dependencies = False
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
repository_id, changeset_revision = tup
- repo_info_dict, cur_includes_tools, cur_includes_tool_dependencies, cur_includes_tools_for_display_in_tool_panel, cur_has_repository_dependencies = \
+ repo_info_dict, cur_includes_tools, cur_includes_tool_dependencies, cur_includes_tools_for_display_in_tool_panel, \
+ cur_has_repository_dependencies, cur_has_repository_dependencies_only_if_compiling_contained_td = \
repository_util.get_repo_info_dict( trans, repository_id, changeset_revision )
if cur_has_repository_dependencies and not has_repository_dependencies:
has_repository_dependencies = True
+ if cur_has_repository_dependencies_only_if_compiling_contained_td and not has_repository_dependencies_only_if_compiling_contained_td:
+ has_repository_dependencies_only_if_compiling_contained_td = True
if cur_includes_tools and not includes_tools:
includes_tools = True
if cur_includes_tool_dependencies and not includes_tool_dependencies:
@@ -1629,6 +1654,7 @@
return dict( includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
has_repository_dependencies=has_repository_dependencies,
+ has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
includes_tool_dependencies=includes_tool_dependencies,
repo_info_dicts=repo_info_dicts )
@@ -1708,7 +1734,9 @@
tool_version_dicts = []
for changeset in repo.changelog:
current_changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ current_changeset_revision )
if repository_metadata and repository_metadata.tool_versions:
tool_version_dicts.append( repository_metadata.tool_versions )
if current_changeset_revision == changeset_revision:
@@ -1766,22 +1794,30 @@
includes_workflows = True
readme_files_dict = readme_util.build_readme_files_dict( metadata )
# See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
+ has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
+ includes_tool_dependencies = False
for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if repository_dependencies:
- has_repository_dependencies = True
- else:
- has_repository_dependencies = False
- if tool_dependencies:
- includes_tool_dependencies = True
- else:
- includes_tool_dependencies = False
+ if not has_repository_dependencies or not has_repository_dependencies_only_if_compiling_contained_td or not includes_tool_dependencies:
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ for rd_key, rd_tups in repository_dependencies.items():
+ if rd_key in [ 'root_key', 'description' ]:
+ continue
+ curr_has_repository_dependencies, curr_has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( rd_tups )
+ if curr_has_repository_dependencies and not has_repository_dependencies:
+ has_repository_dependencies = True
+ if curr_has_repository_dependencies_only_if_compiling_contained_td and not has_repository_dependencies_only_if_compiling_contained_td:
+ has_repository_dependencies_only_if_compiling_contained_td = True
+ if tool_dependencies and not includes_tool_dependencies:
+ includes_tool_dependencies = True
return dict( includes_data_managers=includes_data_managers,
includes_datatypes=includes_datatypes,
includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
has_repository_dependencies=has_repository_dependencies,
+ has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
includes_tool_dependencies=includes_tool_dependencies,
includes_workflows=includes_workflows,
readme_files_dict=readme_files_dict,
@@ -2434,7 +2470,9 @@
try:
commands.remove( repo.ui, repo, selected_file, force=True )
except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ log.debug( "Error removing the following file using the mercurial API:\n %s" % str( selected_file ) )
+ log.debug( "The error was: %s" % str( e ))
+ log.debug( "Attempting to remove the file using a different approach." )
relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
repo.dirstate.remove( relative_selected_file )
repo.dirstate.write()
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/galaxy/webapps/tool_shed/model/__init__.py
--- a/lib/galaxy/webapps/tool_shed/model/__init__.py
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -253,6 +253,7 @@
self.time_last_tested = time_last_tested
self.tool_test_results = tool_test_results
self.has_repository_dependencies = has_repository_dependencies
+ # We don't consider the special case has_repository_dependencies_only_if_compiling_contained_td here.
self.includes_datatypes = includes_datatypes
self.includes_tools = includes_tools
self.includes_tool_dependencies = includes_tool_dependencies
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -234,10 +234,10 @@
else:
includes_tools = False
includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
- if 'repository_dependencies' in metadata:
- has_repository_dependencies = True
- else:
- has_repository_dependencies = False
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( repository_dependencies )
if 'tool_dependencies' in metadata:
includes_tool_dependencies = True
else:
@@ -246,6 +246,7 @@
# Here's where we may have to handle enhancements to the callers. See above comment.
includes_tools = False
has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
includes_tool_dependencies = False
includes_tools_for_display_in_tool_panel = False
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
@@ -259,7 +260,8 @@
repository_metadata=repository_metadata,
tool_dependencies=None,
repository_dependencies=None )
- return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies
+ return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
def get_repository_components_for_installation( encoded_tsr_id, encoded_tsr_ids, repo_info_dicts, tool_panel_section_keys ):
"""
@@ -311,6 +313,7 @@
includes_tool_dependencies = update_dict.get( 'includes_tool_dependencies', False )
includes_workflows = update_dict.get( 'includes_workflows', False )
has_repository_dependencies = update_dict.get( 'has_repository_dependencies', False )
+ has_repository_dependencies_only_if_compiling_contained_td = update_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
changeset_revision = update_dict.get( 'changeset_revision', None )
ctx_rev = update_dict.get( 'ctx_rev', None )
changeset_revision_dict[ 'includes_data_managers' ] = includes_data_managers
@@ -320,6 +323,7 @@
changeset_revision_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
changeset_revision_dict[ 'includes_workflows' ] = includes_workflows
changeset_revision_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+ changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = has_repository_dependencies_only_if_compiling_contained_td
changeset_revision_dict[ 'changeset_revision' ] = changeset_revision
changeset_revision_dict[ 'ctx_rev' ] = ctx_rev
except Exception, e:
@@ -331,6 +335,7 @@
changeset_revision_dict[ 'includes_tool_dependencies' ] = False
changeset_revision_dict[ 'includes_workflows' ] = False
changeset_revision_dict[ 'has_repository_dependencies' ] = False
+ changeset_revision_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = False
changeset_revision_dict[ 'changeset_revision' ] = None
changeset_revision_dict[ 'ctx_rev' ] = None
return changeset_revision_dict
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -394,7 +394,7 @@
if not binary_installed:
print 'Binary installation did not occur, so proceeding with install and compile recipe.'
# Make sure to reset for installation if attempt at binary installation resulted in an error.
- if tool_dependency.status != app.model.ToolDependency.installation_status.NEW:
+ if tool_dependency.status != app.model.ToolDependency.installation_status.NEVER_INSTALLED:
removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency )
install_via_fabric( app,
tool_dependency,
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -12,6 +12,7 @@
from tool_shed.util import encoding_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
+from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import xml_util
from tool_shed.galaxy_install.tool_dependencies.install_util import install_package
@@ -69,16 +70,28 @@
Return dictionaries containing the sets of installed and missing tool dependencies and repository dependencies associated with the repository defined
by the received repo_info_dict.
"""
+ repository = None
+ installed_rd = {}
+ installed_td = {}
+ missing_rd = {}
+ missing_td = {}
name = repo_info_dict.keys()[ 0 ]
repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies. We don't add to installed_td
+ # and missing_td here because at this point they are empty.
+ installed_td, missing_td = get_installed_and_missing_tool_dependencies( trans, tool_shed_url, tool_dependencies )
+ # In cases where a repository dependency is required only for compiling a dependent repository's tool dependency, the value of
+ # repository_dependencies will be an empty dictionary here.
if repository_dependencies:
# We have a repository with one or more defined repository dependencies.
missing_td = {}
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the re-installation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
- repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
+ if not repository:
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
else:
@@ -86,73 +99,70 @@
# Discover all repository dependencies and retrieve information for installing them.
all_repo_info_dict = get_required_repo_info_dicts( trans, tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+ has_repository_dependencies_only_if_compiling_contained_td = all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
includes_tools = all_repo_info_dict.get( 'includes_tools', False )
required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
# Display tool dependencies defined for each of the repository dependencies.
if required_repo_info_dicts:
- all_tool_dependencies = {}
+ required_tool_dependencies = {}
for rid in required_repo_info_dicts:
for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, rid_installed_td = \
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
- if rid_installed_td:
- for td_key, td_dict in rid_installed_td.items():
- if td_key not in all_tool_dependencies:
- all_tool_dependencies[ td_key ] = td_dict
- if all_tool_dependencies:
- if installed_td is None:
- installed_td = {}
- else:
- # Move all tool dependencies to the missing_tool_dependencies container.
- for td_key, td_dict in installed_td.items():
- if td_key not in missing_td:
- missing_td[ td_key ] = td_dict
- installed_td = {}
+ if rid_tool_dependencies:
+ for td_key, td_dict in rid_tool_dependencies.items():
+ if td_key not in required_tool_dependencies:
+ required_tool_dependencies[ td_key ] = td_dict
+ if required_tool_dependencies:
# Discover and categorize all tool dependencies defined for this repository's repository dependencies.
- required_tool_dependencies, required_missing_tool_dependencies = \
- get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
- if required_tool_dependencies:
+ required_installed_td, required_missing_td = get_installed_and_missing_tool_dependencies( trans,
+ tool_shed_url,
+ required_tool_dependencies )
+ if required_installed_td:
if not includes_tool_dependencies:
includes_tool_dependencies = True
- for td_key, td_dict in required_tool_dependencies.items():
+ for td_key, td_dict in required_installed_td.items():
if td_key not in installed_td:
installed_td[ td_key ] = td_dict
- if required_missing_tool_dependencies:
+ if required_missing_td:
if not includes_tool_dependencies:
includes_tool_dependencies = True
- for td_key, td_dict in required_missing_tool_dependencies.items():
+ for td_key, td_dict in required_missing_td.items():
if td_key not in missing_td:
missing_td[ td_key ] = td_dict
else:
- # We have a single repository with no defined repository dependencies.
+ # We have a single repository with (possibly) no defined repository dependencies.
all_repo_info_dict = get_required_repo_info_dicts( trans, tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+ has_repository_dependencies_only_if_compiling_contained_td = all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
includes_tools = all_repo_info_dict.get( 'includes_tools', False )
required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
- installed_rd = None
- missing_rd = None
- missing_td = None
- dependencies_for_repository_dict = dict( changeset_revision=changeset_revision,
- has_repository_dependencies=has_repository_dependencies,
- includes_tool_dependencies=includes_tool_dependencies,
- includes_tools=includes_tools,
- includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
- installed_repository_dependencies=installed_rd,
- installed_tool_dependencies=installed_td,
- missing_repository_dependencies=missing_rd,
- missing_tool_dependencies=missing_td,
- name=name,
- repository_owner=repository_owner )
+ dependencies_for_repository_dict = \
+ dict( changeset_revision=changeset_revision,
+ has_repository_dependencies=has_repository_dependencies,
+ has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ installed_repository_dependencies=installed_rd,
+ installed_tool_dependencies=installed_td,
+ missing_repository_dependencies=missing_rd,
+ missing_tool_dependencies=missing_td,
+ name=name,
+ repository_owner=repository_owner )
return dependencies_for_repository_dict
def get_installed_and_missing_repository_dependencies( trans, repository ):
"""
Return the installed and missing repository dependencies for a tool shed repository that has a record in the Galaxy database, but
- may or may not be installed. In this case, the repository dependencies are associated with the repository in the database.
+ may or may not be installed. In this case, the repository dependencies are associated with the repository in the database. Do not
+ include a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since these
+ special kinds of repository dependencies are really a dependency of the dependent repository's contained tool dependency, and only if
+ that tool dependency requires compilation.
"""
missing_repository_dependencies = {}
installed_repository_dependencies = {}
@@ -166,7 +176,14 @@
for tsr in repository.repository_dependencies:
prior_installation_required = suc.set_prior_installation_required( repository, tsr )
only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr )
- rd_tup = [ tsr.tool_shed, tsr.name, tsr.owner, tsr.changeset_revision, prior_installation_required, only_if_compiling_contained_td, tsr.id, tsr.status ]
+ rd_tup = [ tsr.tool_shed,
+ tsr.name,
+ tsr.owner,
+ tsr.changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ tsr.id,
+ tsr.status ]
if tsr.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
installed_rd_tups.append( rd_tup )
else:
@@ -184,7 +201,7 @@
repository_dependencies = metadata.get( 'repository_dependencies', {} )
description = repository_dependencies.get( 'description', None )
# We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the missing_repository_dependencies
- # dictionary for proper display parsing.
+ # dictionaries for proper display parsing.
root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
repository.name,
repository.owner,
@@ -210,7 +227,7 @@
installed_repository_dependencies = {}
missing_rd_tups = []
installed_rd_tups = []
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
if repository_dependencies:
description = repository_dependencies[ 'description' ]
@@ -228,7 +245,7 @@
# tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td )
tmp_clone_url = suc.generate_clone_url_from_repo_info_tup( rd_tup )
tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
- repository, current_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
+ repository, installed_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
if repository:
new_rd_tup = [ tool_shed,
name,
@@ -273,29 +290,46 @@
missing_repository_dependencies[ 'description' ] = description
return installed_repository_dependencies, missing_repository_dependencies
-def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ):
+def get_installed_and_missing_tool_dependencies( trans, tool_shed_url, tool_dependencies_dict ):
"""Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
- # FIXME: confirm that this method currently populates and returns only missing tool dependencies. If so, this method should be enhanced to search for
- # installed tool dependencies defined as complex repository dependency relationships.
- if all_tool_dependencies:
- tool_dependencies = {}
- missing_tool_dependencies = {}
- for td_key, val in all_tool_dependencies.items():
- # Set environment tool dependencies are a list, set each member to never installed.
+ installed_tool_dependencies = {}
+ missing_tool_dependencies = {}
+ if tool_dependencies_dict:
+ for td_key, val in tool_dependencies_dict.items():
+ # Default the status to NEVER_INSTALLED.
+ tool_dependency_status = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ # Set environment tool dependencies are a list.
if td_key == 'set_environment':
new_val = []
for requirement_dict in val:
- requirement_dict[ 'status' ] = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ # {'repository_name': 'xx', 'name': 'bwa', 'version': '0.5.9', 'repository_owner': 'yy', 'changeset_revision': 'zz', 'type': 'package'}
+ tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app,
+ requirement_dict.get( 'name', None ),
+ requirement_dict.get( 'version', None ),
+ requirement_dict.get( 'type', 'package' ) )
+ if tool_dependency:
+ tool_dependency_status = tool_dependency.status
+ requirement_dict[ 'status' ] = tool_dependency_status
new_val.append( requirement_dict )
- missing_tool_dependencies[ td_key ] = new_val
+ if tool_dependency_status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ installed_tool_dependencies[ td_key ] = new_val
+ else:
+ missing_tool_dependencies[ td_key ] = new_val
else:
- # Since we have a new install, missing tool dependencies have never been installed.
- val[ 'status' ] = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ # The val dictionary looks something like this:
+ # {'repository_name': 'xx', 'name': 'bwa', 'version': '0.5.9', 'repository_owner': 'yy', 'changeset_revision': 'zz', 'type': 'package'}
+ tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app,
+ val.get( 'name', None ),
+ val.get( 'version', None ),
+ val.get( 'type', 'package' ) )
+ if tool_dependency:
+ tool_dependency_status = tool_dependency.status
+ val[ 'status' ] = tool_dependency_status
+ if tool_dependency_status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ installed_tool_dependencies[ td_key ] = val
+ else:
missing_tool_dependencies[ td_key ] = val
- else:
- tool_dependencies = None
- missing_tool_dependencies = None
- return tool_dependencies, missing_tool_dependencies
+ return installed_tool_dependencies, missing_tool_dependencies
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
"""
@@ -328,7 +362,9 @@
toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
components_list = [ toolshed, name, owner, changeset_revision ]
only_if_compiling_contained_td = 'False'
- # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository.
+ # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since
+ # in this case, the repository dependency is really a dependency of the dependent repository's contained tool dependency, and only if
+ # that tool dependency requires compilation.
if not util.asbool( only_if_compiling_contained_td ):
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
@@ -337,8 +373,8 @@
only_if_compiling_contained_td = components_list[ 5 ]
except:
only_if_compiling_contained_td = 'False'
- # TODO: Fix this to display the tool dependency if only_if_compiling_contained_td is True, but clarify that installation may not
- # happen.
+ # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository
+ # (see above comment).
if not util.asbool( only_if_compiling_contained_td ):
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
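The rewritten get_installed_and_missing_tool_dependencies() above replaces the old new-install assumption, where every tool dependency defaulted to NEVER_INSTALLED, with a per-dependency database lookup. A simplified sketch of that categorization for the ordinary (non-set_environment) case; lookup_status here is a hypothetical stand-in for the get_tool_dependency_by_name_version_type() query added in this changeset:

    INSTALLED = "Installed"
    NEVER_INSTALLED = "Never installed"

    def split_tool_dependencies(tool_dependencies, lookup_status):
        """Partition a tool_dependencies dict into (installed, missing) dicts,
        defaulting entries with no database record to NEVER_INSTALLED."""
        installed, missing = {}, {}
        for td_key, requirement in tool_dependencies.items():
            status = lookup_status(requirement.get("name"),
                                   requirement.get("version"),
                                   requirement.get("type", "package"))
            requirement["status"] = status or NEVER_INSTALLED
            if requirement["status"] == INSTALLED:
                installed[td_key] = requirement
            else:
                missing[td_key] = requirement
        return installed, missing

    # With no matching records, everything lands in the missing dict:
    deps = {"bwa/0.5.9": {"name": "bwa", "version": "0.5.9", "type": "package"}}
    print(split_tool_dependencies(deps, lambda name, version, type_: None))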
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/common_util.py
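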
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -1,3 +1,4 @@
+import logging
import os
import urllib2
from galaxy.util import json
@@ -5,6 +6,8 @@
from tool_shed.util import encoding_util
from tool_shed.util import xml_util
+log = logging.getLogger( __name__ )
+
REPOSITORY_OWNER = 'devteam'
def accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies ):
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -280,13 +280,16 @@
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
"""Create or update a repository_metadatqa record in the tool shed."""
has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
includes_datatypes = False
includes_tools = False
includes_tool_dependencies = False
includes_workflows = False
if metadata_dict:
- if 'repository_dependencies' in metadata_dict:
- has_repository_dependencies = True
+ repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( repository_dependencies )
if 'datatypes' in metadata_dict:
includes_datatypes = True
if 'tools' in metadata_dict:
@@ -295,7 +298,11 @@
includes_tool_dependencies = True
if 'workflows' in metadata_dict:
includes_workflows = True
- downloadable = has_repository_dependencies or includes_datatypes or includes_tools or includes_tool_dependencies or includes_workflows
+ if has_repository_dependencies or has_repository_dependencies_only_if_compiling_contained_td or includes_datatypes or \
+ includes_tools or includes_tool_dependencies or includes_workflows:
+ downloadable = True
+ else:
+ downloadable = False
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
# A repository metadata record already exists with the received changeset_revision, so we don't need to check the skip_tool_test table.
@@ -1851,10 +1858,13 @@
repository_metadata.includes_datatypes = True
else:
repository_metadata.includes_datatypes = False
- if 'repository_dependencies' in metadata_dict:
- repository_metadata.has_repository_dependencies = True
- else:
- repository_metadata.has_repository_dependencies = False
+ # We don't store information about the special type of repository dependency that is needed only for compiling a tool dependency
+ # defined for the dependent repository.
+ repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( repository_dependencies )
+ repository_metadata.has_repository_dependencies = has_repository_dependencies
if 'tool_dependencies' in metadata_dict:
repository_metadata.includes_tool_dependencies = True
else:
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -139,6 +139,7 @@
# repository dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed.
if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.NEW,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
# The current tool shed repository is not currently installed, so we can update it's record in the database.
can_update = True
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -868,6 +868,31 @@
.first()
return None
+def get_repository_dependency_types( repository_dependencies ):
+ """
+ Inspect the received list of repository_dependencies tuples and return boolean values for has_repository_dependencies and
+ has_repository_dependencies_only_if_compiling_contained_td.
+ """
+ # Set has_repository_dependencies, which will be True only if at least one repository_dependency is defined with the value of
+ # only_if_compiling_contained_td as False.
+ has_repository_dependencies = False
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ has_repository_dependencies = True
+ break
+ # Set has_repository_dependencies_only_if_compiling_contained_td, which will be True only if at least one repository_dependency is
+ # defined with the value of only_if_compiling_contained_td as True.
+ has_repository_dependencies_only_if_compiling_contained_td = False
+ for rd_tup in repository_dependencies:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if asbool( only_if_compiling_contained_td ):
+ has_repository_dependencies_only_if_compiling_contained_td = True
+ break
+ return has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
+
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
"""Return an installed tool_shed_repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
# This method is used only in Galaxy, not the tool shed.
@@ -1389,14 +1414,23 @@
def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple ):
"""
- Handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
- in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
- that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled.
+ Find out if a repository is already installed into Galaxy - there are several scenarios where this is necessary. For example, this method
+ will handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
+ in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one that was
+ previously installed. We'll look in the database instead of on disk since the repository may be currently uninstalled.
"""
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
get_repo_info_tuple_contents( repo_info_tuple )
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
- # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
+ # See if we can locate the repository using the value of changeset_revision.
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
+ tool_shed,
+ repository_name,
+ repository_owner,
+ changeset_revision )
+ if tool_shed_repository:
+ return tool_shed_repository, changeset_revision
+ # Get all previous changeset revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
url = url_join( tool_shed_url,
'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
@@ -1410,14 +1444,14 @@
repository_name,
repository_owner,
previous_changeset_revision )
- if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
+ if tool_shed_repository:
return tool_shed_repository, previous_changeset_revision
return None, None
def reset_previously_installed_repository( trans, repository ):
"""
Reset the atrributes of a tool_shed_repository that was previsouly installed. The repository will be in some state other than with a
- status of INSTALLED, so all atributes will be set to the default (NEW( state. This will enable the repository to be freshly installed.
+ status of INSTALLED, so all atributes will be set to the default NEW state. This will enable the repository to be freshly installed.
"""
repository.deleted = False
repository.tool_shed_status = None
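The repository_was_previously_installed() change above adds a fast path: the received changeset revision is tried as an installed_changeset_revision before the tool shed is asked for earlier revisions. A rough sketch of that control flow, with find_record and previous_revisions as hypothetical stand-ins for the database query and the repository/previous_changeset_revisions request:

    def repository_previously_installed(name, owner, changeset_revision,
                                        find_record, previous_revisions):
        """Return (repository, changeset_revision) for an existing record, else (None, None)."""
        # Fast path: the received changeset revision may itself be the installed one.
        repository = find_record(name, owner, changeset_revision)
        if repository:
            return repository, changeset_revision
        # Otherwise walk the earlier changeset revisions reported by the tool shed.
        for previous in previous_revisions(name, owner, changeset_revision):
            repository = find_record(name, owner, previous)
            if repository:
                return repository, previous
        return None, None

Dropping the old exclusion of records in the NEW state pairs with the repository_dependency_util.py change below, which now allows NEW records to be updated during installation.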
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -258,6 +258,14 @@
app.model.ToolDependency.table.c.type == type ) ) \
.first()
+def get_tool_dependency_by_name_version_type( app, name, version, type ):
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.ToolDependency ) \
+ .filter( and_( app.model.ToolDependency.table.c.name == name,
+ app.model.ToolDependency.table.c.version == version,
+ app.model.ToolDependency.table.c.type == type ) ) \
+ .first()
+
def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
sa_session = app.model.context.current
return sa_session.query( app.model.ToolDependency ) \
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -962,7 +962,7 @@
suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
- """A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
+ """A tool shed repository is being deactivated or uninstalled, so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
# in the same way when the repository is activated or reinstalled.
tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
diff -r 93215e7e74d020b478803ce2755f76c7e5236f16 -r ab20415126a768f456314cfe587e4ce71fd2049b test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -114,7 +114,7 @@
# version=section_version )
# This dict is appended to tool_panel_section_metadata[ tool_guid ]
tool_panel_section = tool_panel_section_metadata[ tool_guid ][ 0 ][ 'name' ]
- assert tool_panel_section == expected_tool_panel_section, 'Expected tool panel section %s, found %s\nMetadata: %s\n' % \
+ assert tool_panel_section == expected_tool_panel_section, 'Expected to find tool panel section *%s*, but instead found *%s*\nMetadata: %s\n' % \
( expected_tool_panel_section, tool_panel_section, metadata )
def check_installed_repository_tool_dependencies( self,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: guerler: Drag&drop multiple file upload: updates and fixes
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/93215e7e74d0/
Changeset: 93215e7e74d0
User: guerler
Date: 2013-09-20 04:53:34
Summary: Drag&drop multiple file upload: updates and fixes
Affected #: 9 files
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -235,8 +235,15 @@
# get file destination
file_destination = dataset.get_file_name()
+ # check if the directory exists
+ dn = os.path.dirname(file_destination)
+ if not os.path.exists(dn):
+ os.makedirs(dn)
+
+ # get file and directory names
+ fn = os.path.basename(content.filename)
+
# save file locally
- fn = os.path.basename(content.filename)
open(file_destination, 'wb').write(content.file.read())
# log
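The hunk above guards the upload write by creating the dataset's parent directory first, so an upload into a history whose object-store path does not exist yet no longer fails. The same pattern in isolation, as a minimal sketch (the function name and the content_stream argument are illustrative):

import os

def save_upload(file_destination, content_stream):
    # Create the parent directory if it is missing, mirroring the
    # os.path.dirname / os.makedirs guard added above.
    destination_dir = os.path.dirname(file_destination)
    if not os.path.exists(destination_dir):
        os.makedirs(destination_dir)
    # Write the uploaded bytes to their final location.
    with open(file_destination, 'wb') as out:
        out.write(content_stream.read())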
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/scripts/galaxy.modal.js
--- a/static/scripts/galaxy.modal.js
+++ b/static/scripts/galaxy.modal.js
@@ -77,13 +77,25 @@
// link functions
$.each(options.buttons, function(name, value)
{
- footer.append($('<button></button>').text(name).click(value)).append(" ");
+ footer.append($('<button id="' + String(name).toLowerCase() + '"></button>').text(name).click(value)).append(" ");
});
} else
// default close button
footer.append($('<button></button>').text('Close').click(function() { self.hide() })).append(" ");
},
+ // enable buttons
+ enable: function(name)
+ {
+ $(this.el).find('#' + String(name).toLowerCase()).prop('disabled', false);
+ },
+
+ // disable buttons
+ disable: function(name)
+ {
+ $(this.el).find('#' + String(name).toLowerCase()).prop('disabled', true);
+ },
+
/*
HTML TEMPLATES
*/
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/scripts/galaxy.upload.js
--- a/static/scripts/galaxy.upload.js
+++ b/static/scripts/galaxy.upload.js
@@ -1,5 +1,5 @@
/*
- galaxy upload v1.0
+ galaxy upload
*/
// dependencies
@@ -14,12 +14,20 @@
// button
button_show : null,
- // file counter
- file_counter: 0,
+ // upload mod
+ uploadbox: null,
// initialize
initialize : function()
{
+ // wait for galaxy history panel (workaround due to the use of iframes)
+ if (!Galaxy.currHistoryPanel)
+ {
+ var self = this;
+ window.setTimeout(function() { self.initialize() }, 500)
+ return;
+ }
+
// add activate icon
var self = this;
this.button_show = new mod_master.GalaxyMasterIcon (
@@ -37,87 +45,252 @@
// events
events :
{
- 'mouseover' : 'event_mouseover',
- 'mouseleave' : 'event_mouseleave'
+ 'mouseover' : 'event_mouseover',
+ 'mouseleave' : 'event_mouseleave'
},
// mouse over
event_mouseover : function (e)
{
- $('#galaxy-upload-box').addClass('highlight');
},
// mouse left
event_mouseleave : function (e)
{
- $('#galaxy-upload-box').removeClass('highlight');
},
// start
- event_start : function(index, file, message)
+ event_announce : function(index, file, message)
{
+ // hide info
+ this.uploadbox.info().hide();
+
// make id
- var id = '#galaxy-upload-file-' + index;
+ var id = '#upload-' + index;
- // add tag
- $('#galaxy-upload-box').append(this.template_file(id));
+ // add upload item
+ $(this.el).append(this.template_file(id));
+
+ // scroll to bottom
+ $(this.el).scrollTop($(this.el).prop('scrollHeight'));
+
+ // access upload item
+ var it = this.get_upload_item(index);
+
+ // fade in
+ it.fadeIn();
// update title
- $('#galaxy-upload-file-' + index).find('.title').html(file.name);
+ it.find('.title').html(file.name);
+ // configure select field
+ it.find('#extension').select2(
+ {
+ placeholder: 'Auto-detect',
+ width: 'copy',
+ ajax: {
+ url: "http://www.weighttraining.com/sm/search",
+ dataType: 'jsonp',
+ quietMillis: 100,
+ data: function(term, page)
+ {
+ return {
+ types: ["exercise"],
+ limit: -1,
+ term: term
+ };
+ },
+ results: function(data, page)
+ {
+ return { results: data.results.exercise }
+ }
+ },
+ formatResult: function(exercise)
+ {
+ return "<div class='select2-user-result'>" + exercise.term + "</div>";
+ },
+ formatSelection: function(exercise)
+ {
+ return exercise.term;
+ },
+ initSelection : function (element, callback)
+ {
+ var elementText = $(element).attr('data-init-text');
+ callback({"term":elementText});
+ }
+ });
+
+ // add functionality to remove button
+ var self = this;
+ it.find('.remove').on('click', function() { self.event_remove (index) });
+
// initialize progress
this.event_progress(index, file, 0);
- // update counter
- this.file_counter++;
- this.refresh();
+ // update button status
+ this.modal.enable('Upload');
+ this.modal.enable('Reset');
+ },
+
+ // start
+ event_initialize : function(index, file, message)
+ {
+ // update on screen counter
+ this.button_show.number(message);
+
+ // get element
+ var it = this.get_upload_item(index);
+
+ // read in configuration
+ var data = {
+ source : "upload",
+ space_to_tabs : it.find('#space_to_tabs').is(':checked'),
+ extension : it.find('#extension').val()
+ }
+
+ // return additional data to be send with file
+ return data;
},
// progress
event_progress : function(index, file, message)
{
- // get progress bar
- var el = $('#galaxy-upload-file-' + index);
+ // get element
+ var it = this.get_upload_item(index);
// get value
var percentage = parseInt(message);
// update progress
- el.find('.progress').css({ width : percentage + '%' });
+ it.find('.progress-bar').css({ width : percentage + '%' });
// update info
- el.find('.info').html(percentage + '% of ' + this.size_to_string (file.size));
+ it.find('.info').html(percentage + '% of ' + this.size_to_string (file.size));
},
// end
event_success : function(index, file, message)
- {
+ {
+ // get element
+ var it = this.get_upload_item(index);
+
+ // update progress frame
+ it.addClass('panel-success');
+ it.removeClass('panel-default');
+
// update galaxy history
Galaxy.currHistoryPanel.refresh();
- // update counter
- this.file_counter--;
- this.refresh();
+ // make sure progress is shown correctly
+ this.event_progress(index, file, 100);
+
+ // update on screen counter
+ this.button_show.number('');
},
// end
event_error : function(index, file, message)
{
- // get file box
- var el = $('#galaxy-upload-file-' + index);
+ // get element
+ var it = this.get_upload_item(index);
// update progress frame
- el.find('.progress-frame').addClass('failed');
+ it.addClass('panel-danger');
+ it.removeClass('panel-default');
- // update error message
- el.find('.error').html("<strong>Failed:</strong> " + message);
-
- // update progress
+ // remove progress bar
+ it.find('.progress').remove();
+
+ // write error message
+ it.find('.error').html('<strong>Failed:</strong> ' + message);
+
+ // make sure progress is shown correctly
this.event_progress(index, file, 0);
- // update counter
- this.file_counter--;
- this.refresh();
+ // update on screen counter
+ this.button_show.number('');
+ },
+
+ // start upload process
+ event_upload : function()
+ {
+ // hide configuration
+ $(this.el).find('.panel-body').hide();
+
+ // switch icon
+ $(this.el).find('.remove').each(function()
+ {
+ $(this).removeClass('fa-icon-trash');
+ $(this).addClass('fa-icon-caret-down');
+ });
+
+ // update button status
+ this.modal.disable('Upload');
+
+ // configure url
+ var current_history = Galaxy.currHistoryPanel.model.get('id');
+ this.uploadbox.configure({url : galaxy_config.root + "api/histories/" + current_history + "/contents"});
+
+ // initiate upload procedure in plugin
+ this.uploadbox.upload();
+ },
+
+ // remove all
+ event_reset : function()
+ {
+ // remove from screen
+ var panels = $(this.el).find('.panel');
+ var self = this;
+ panels.fadeOut({complete: function()
+ {
+ // remove panels
+ panels.remove();
+
+ // show on screen info
+ self.uploadbox.info().fadeIn();
+ }});
+
+ // update button status
+ this.modal.disable('Upload');
+ this.modal.disable('Reset');
+
+ // remove from queue
+ this.uploadbox.reset();
+ },
+
+ // remove item from upload list
+ event_remove : function(index)
+ {
+ // remove
+ var self = this;
+ var it = this.get_upload_item(index);
+
+ // fade out and update button status
+ it.fadeOut({complete: function()
+ {
+ // remove from screen
+ it.remove();
+
+ // remove from queue
+ self.uploadbox.remove(index);
+
+ // update reset button
+ if ($(self.el).find('.panel').length > 0)
+ self.modal.enable('Reset');
+ else {
+ // disable reset button
+ self.modal.disable('Reset');
+
+ // show on screen info
+ self.uploadbox.info().fadeIn();
+ }
+
+ // update upload button
+ if (self.uploadbox.length() > 0)
+ self.modal.enable('Upload');
+ else
+ self.modal.disable('Upload');
+ }});
},
// show/hide upload frame
@@ -126,14 +299,6 @@
// prevent default
e.preventDefault();
- // wait for galaxy history panel (workaround due to the use of iframes)
- if (!Galaxy.currHistoryPanel)
- {
- var self = this;
- window.setTimeout(function() { self.event_show(e) }, 200)
- return;
- }
-
// create modal
if (!this.modal)
{
@@ -142,44 +307,45 @@
this.modal = new mod_modal.GalaxyModal(
{
title : 'Upload files from your local drive',
- body : this.template(),
+ body : this.template('upload-box'),
buttons : {
- 'Close' : function() {self.modal.hide()}
+ 'Select' : function() {self.uploadbox.select()},
+ 'Upload' : function() {self.event_upload()},
+ 'Reset' : function() {self.event_reset()},
+ 'Close' : function() {self.modal.hide()}
}
});
- // get current history
- var current_history = Galaxy.currHistoryPanel.model.get('id');
-
+ // set element
+ this.setElement('#upload-box');
+
// file upload
var self = this;
- $('#galaxy-upload-box').uploadbox(
+ this.uploadbox = this.$el.uploadbox(
{
- url : galaxy_config.root + "api/histories/" + current_history + "/contents",
dragover : self.event_mouseover,
dragleave : self.event_mouseleave,
- start : function(index, file, message) { self.event_start(index, file, message) },
+ announce : function(index, file, message) { self.event_announce(index, file, message) },
+ initialize : function(index, file, message) { return self.event_initialize(index, file, message) },
success : function(index, file, message) { self.event_success(index, file, message) },
progress : function(index, file, message) { self.event_progress(index, file, message) },
error : function(index, file, message) { self.event_error(index, file, message) },
- data : {source : "upload"}
});
- // set element
- this.setElement('#galaxy-upload-box');
+ // update button status
+ this.modal.disable('Upload');
+ this.modal.disable('Reset');
}
-
+
// show modal
this.modal.show();
},
- // update counter
- refresh: function ()
+ // get upload item
+ get_upload_item: function(index)
{
- if (this.file_counter > 0)
- this.button_show.number(this.file_counter);
- else
- this.button_show.number('');
+ // get element
+ return $(this.el).find('#upload-' + index);
},
// to string
@@ -197,21 +363,32 @@
},
// load html template
- template: function()
+ template: function(id)
{
- return '<form id="galaxy-upload-box" class="galaxy-upload-box"></form>';
+ return '<div id="' + id + '" class="upload-box"></div>';
},
// load html template
template_file: function(id)
{
- return '<div id="' + id.substr(1) + '" class="file corner-soft shadow">' +
- '<div class="title"></div>' +
- '<div class="error"></div>' +
- '<div class="progress-frame corner-soft">' +
- '<div class="progress"></div>' +
+ return '<div id="' + id.substr(1) + '" class="panel panel-default">' +
+ '<div class="panel-heading">' +
+ '<h5 class="title"></h5>' +
+ '<h5 class="info"></h5>' +
+ '<div class="remove fa-icon-trash"></div>' +
'</div>' +
- '<div class="info"></div>' +
+ '<div class="panel-body">' +
+ '<div class="menu">' +
+ //'<input id="extension" type="hidden" width="10px"/> ' +
+ '<span><input id="space_to_tabs" type="checkbox">Convert spaces to tabs</input></span>' +
+ '</div>' +
+ '</div>' +
+ '<div class="panel-footer">' +
+ '<div class="progress">' +
+ '<div class="progress-bar progress-bar-success"></div>' +
+ '</div>' +
+ '<h6 class="error"></h6>' +
+ '</div>' +
'</div>';
}
});
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/scripts/utils/galaxy.uploadbox.js
--- a/static/scripts/utils/galaxy.uploadbox.js
+++ b/static/scripts/utils/galaxy.uploadbox.js
@@ -1,5 +1,5 @@
/*
- galaxy upload lib v1.0 - uses FileReader, FormData and XMLHttpRequest
+ galaxy upload lib - uses FileReader, FormData and XMLHttpRequest
*/
;(function($)
{
@@ -11,67 +11,71 @@
{
url : '',
paramname : 'content',
- maxfilesize : 2048,
- data : {},
+ maxfilesize : 250,
dragover : function() {},
dragleave : function() {},
+ announce : function() {},
initialize : function() {},
- start : function() {},
progress : function() {},
success : function() {},
error : function(index, file, message) { alert(message); },
error_browser : "Your browser does not support drag-and-drop file uploads.",
- error_filesize : "This file is too large. Please use an FTP client to upload it.",
- error_default : "The upload failed. Please make sure the file is available and accessible.",
- text_default : "Drag&drop files here or click to browse your local drive.",
- text_degrade : "Click here to browse your local drive. <br><br>Unfortunately, your browser does not support multiple file uploads or drag&drop.<br>Please upgrade to i.e. Firefox 4+, Chrome 7+, IE 10+, Opera 12+ or Safari 6+."
+ error_filesize : "This file is too large (>250MB). Please use an FTP client to upload it.",
+ error_default : "Please make sure the file is available.",
+ text_default : "Drag&drop files into this box or click 'Select' to select files!",
+ text_degrade : "Unfortunately, your browser does not support multiple file uploads or drag&drop.<br>Please upgrade to i.e. Firefox 4+, Chrome 7+, IE 10+, Opera 12+ or Safari 6+."
}
- // global file queue
- var queue = [];
+ // options
+ var opts = {};
+
+ // file queue
+ var queue = {};
- // global counter for file being currently processed
- var queue_index = -1;
+ // counter for file being currently processed
+ var queue_index = 0;
- // global queue status
+ // queue length
+ var queue_length = 0;
+
+ // indicates if queue is currently running
var queue_status = false;
+ // element
+ var el = null;
+
// attach to element
$.fn.uploadbox = function(options)
{
// parse options
- var opts = $.extend({}, default_opts, options);
+ opts = $.extend({}, default_opts, options);
// compatibility
var mode = window.File && window.FileReader && window.FormData && window.XMLHttpRequest;
+ // element
+ el = this;
+
// append upload button
- this.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');
- this.append('<div id="uploadbox_info"></div>');
+ el.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');
+ el.append('<div id="uploadbox_info"></div>');
// set info text
if (mode)
- this.find('#uploadbox_info').html(opts.text_default);
+ el.find('#uploadbox_info').html(opts.text_default);
else
- this.find('#uploadbox_info').html(opts.text_degrade);
+ el.find('#uploadbox_info').html(opts.text_degrade);
// attach events
- this.on('drop', drop);
- this.on('dragover', dragover);
- this.on('dragleave', dragleave);
-
- // attach click event
- this.on('click', function(e)
- {
- e.stopPropagation();
- $('#uploadbox_input').trigger(e);
- });
+ el.on('drop', drop);
+ el.on('dragover', dragover);
+ el.on('dragleave', dragleave);
// attach change event
$('#uploadbox_input').change(function(e)
{
- var files = e.target.files;
- upload(files);
+ // add files to queue
+ add(e.target.files);
});
// drop event
@@ -81,11 +85,8 @@
if(!e.dataTransfer)
return;
- // get files from event
- var files = e.dataTransfer.files;
-
- // start upload
- upload(files);
+ // add files to queue
+ add(e.dataTransfer.files);
// prevent default
e.preventDefault();
@@ -98,14 +99,14 @@
function dragover(e)
{
e.preventDefault();
- opts.dragover.call(this, e);
+ opts.dragover.call(e);
}
// drag leave
function dragleave(e)
{
e.stopPropagation();
- opts.dragleave.call(this, e);
+ opts.dragleave.call(e);
}
// progress
@@ -117,39 +118,61 @@
}
// respond to an upload request
- function upload(files)
+ function add(files)
{
- // get current queue size
- var queue_sofar = queue.length;
+ // add new files to queue
+ for (var i = 0; i < files.length; i++)
+ {
+ // new identifier
+ var index = String(++queue_index);
- // add new files to queue
- for (var index = 0; index < files.length; index++)
- queue.push(files[index]);
+ // add to queue
+ queue[index] = files[i];
- // tell client about new uploads
- for (var index = queue_sofar; index < queue.length; index++)
- opts.start(index, queue[index], "");
+ // increase counter
+ queue_length++;
- // initiate processing loop if process loop is not running already
- if (!queue_status)
- process();
+ // announce
+ opts.announce(index, queue[index], "");
+ }
}
+ // remove entry from queue
+ function remove(index)
+ {
+ if (queue[index])
+ {
+ // remove from queue
+ delete queue[index];
+
+ // update counter
+ queue_length--;
+ }
+ }
+
// process an upload, recursive
function process()
{
- // check if for files
- if (queue_index + 1 == queue.length)
+ // get an identifier from the queue
+ var index = -1;
+ for (var key in queue)
{
- queue_status = false;
- return;
+ index = key;
+ break;
}
- // set status
- queue_status = true;
+ // validate
+ if (queue_length == 0)
+ return;
- // identify current index
- var index = ++queue_index;
+ // get current file from queue
+ var file = queue[index];
+
+ // remove from queue
+ remove(index)
+
+ // start
+ var data = opts.initialize(index, file, length);
// add file to queue
try
@@ -158,50 +181,45 @@
var reader = new FileReader();
// identify maximum file size
- var file = queue[index];
var filesize = file.size;
var maxfilesize = 1048576 * opts.maxfilesize;
-
+
// set index
reader.index = index;
if (filesize < maxfilesize)
{
- // link loadend is always called at the end
- reader.onloadend = function(e)
+ // link load
+ reader.onload = function(e)
{
- send(index, file)
+ send(index, file, data)
};
// link error
reader.onerror = function(e)
{
- opts.error(index, file, opts.error_default);
- queue_status = false;
+ error(index, file, opts.error_default);
};
// read data
reader.readAsDataURL(file);
} else {
// skip file
- opts.error(index, file, opts.error_filesize);
-
- // restart process
- process();
+ error(index, file, opts.error_filesize);
}
} catch (err)
{
// parse error
- opts.error(index, file, err);
+ error(index, file, err);
}
}
// send file
- function send (index, file)
+ function send (index, file, data)
{
// construct form data
var formData = new FormData();
- for (var key in opts.data)
- formData.append(key, opts.data[key]);
+ for (var key in data)
+ formData.append(key, data[key]);
formData.append(opts.paramname, file, file.name);
// prepare request
@@ -237,22 +255,94 @@
// pass any error to the error option
if (xhr.status < 200 || xhr.status > 299)
{
+ // format error
+ var text = xhr.statusText;
+ if (!xhr.statusText)
+ text = opts.error_default;
+
// request error
- opts.error(index, file, xhr.statusText + " (Server Code " + xhr.status + ")");
-
- // reset status
- queue_status = false;
- } else {
+ error(index, file, text + " (Server Code " + xhr.status + ")");
+ } else
// parse response
- opts.success(index, file, response);
-
- // upload next file
- process();
- }
+ success(index, file, response);
}
}
- // return
- return this;
+ // success
+ function success (index, file, msg)
+ {
+ // parse message
+ opts.success(index, file, msg);
+
+ // restart process after success
+ process();
+ }
+
+ // error
+ function error (index, file, err)
+ {
+ // parse error
+ opts.error(index, file, err);
+
+ // restart process after error
+ process();
+ }
+
+ /*
+ public interface
+ */
+
+ // open file browser for selection
+ function select()
+ {
+ $('#uploadbox_input').trigger('click');
+ }
+
+ // remove all entries from queue
+ function reset(index)
+ {
+ for (index in queue)
+ remove(index);
+ }
+
+ // initiate upload process
+ function upload()
+ {
+ if (!queue_status)
+ process();
+ }
+
+ // current queue length
+ function length()
+ {
+ return queue_length;
+ }
+
+ // set options
+ function configure(options)
+ {
+ // update current configuration
+ opts = $.extend({}, opts, options);
+
+ // return new configuration
+ return opts;
+ }
+
+ // visibility of on screen information
+ function info()
+ {
+ return el.find('#uploadbox_info');
+ }
+
+ // export functions
+ return {
+ 'select' : select,
+ 'remove' : remove,
+ 'upload' : upload,
+ 'reset' : reset,
+ 'length' : length,
+ 'configure' : configure,
+ 'info' : info
+ };
}
})(jQuery);
\ No newline at end of file
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1122,15 +1122,12 @@
.galaxy-frame .frame .f-close{right:5px;top:1px}
.galaxy-frame .frame .f-pin{left:6px;top:1px}
.galaxy-frame .frame .f-resize{background:#fff;width:16px;height:16px;color:#2c3143;right:0px;bottom:0px;text-align:center;line-height:16px;border:0px}
-.galaxy-upload-box{width:100%;height:200px;max-height:200px;padding:10px 0px 0px 0px;text-align:center;cursor:pointer;overflow:scroll;font-size:12px;line-height:1.428571429;-moz-border-radius:5px;border-radius:5px;border:1px dashed #bfbfbf}.galaxy-upload-box .corner-soft{-moz-border-radius:3px;border-radius:3px}
-.galaxy-upload-box .shadow{-webkit-box-shadow:0 0 2px rgba(0,0,0,0.3)}
-.galaxy-upload-box .highlight{border:1px dashed #333}
-.galaxy-upload-box .file{position:relative;margin:5px 20px 5px 20px;border:1px solid #bfbfbf;color:#333}.galaxy-upload-box .file .title{margin:3px 130px 0px 5px;text-align:left;overflow:hidden;border:0px}
-.galaxy-upload-box .file .progress-frame{border:0px;margin:0px 5px 3px 5px;height:7px;background:#bfbfbf}
-.galaxy-upload-box .file .progress{background:#5cb85c;height:100%;width:0%}
-.galaxy-upload-box .file .failed{background:#d9534f}
-.galaxy-upload-box .file .error{font-size:11px;text-align:left;overflow:hidden;margin:0px 5px 0px 5px}
-.galaxy-upload-box .file .info{position:absolute;top:4px;right:5px;font-size:11px;text-align:right;overflow:hidden;max-width:100px;max-height:12px}
+.upload-box{width:100%;height:250px;max-height:250px;text-align:center;overflow:scroll;font-size:12px;line-height:1.33;-moz-border-radius:5px;border-radius:5px;border:1px dashed #bfbfbf;padding:20px}.upload-box .panel{display:none}.upload-box .panel .panel-heading{position:relative;height:19px;padding:5px}.upload-box .panel .panel-heading .title{position:absolute;top:2px;font-weight:normal;text-align:left;margin:0px;max-width:300px;overflow:hidden}
+.upload-box .panel .panel-heading .info{position:absolute;top:3px;font-weight:normal;right:20px;text-align:right;margin:0px}
+.upload-box .panel .panel-heading .remove{position:absolute;cursor:pointer;top:0px;right:3px}
+.upload-box .panel .panel-body{position:relative;padding:5px}
+.upload-box .panel .panel-footer{position:relative;height:20px;padding:0px}.upload-box .panel .panel-footer .progress{height:10px;margin:5px}
+.upload-box .panel .panel-footer .error{font-weight:normal;margin:2px}
.unselectable{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;user-select:none}
.parent-width{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;width:100%;*width:90%}
.clear:before,.clear:after{content:" ";display:table;}
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/style/blue/sprite-fugue.png
Binary file static/style/blue/sprite-fugue.png has changed
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/style/blue/sprite-history-buttons.png
Binary file static/style/blue/sprite-history-buttons.png has changed
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/style/blue/sprite-history-states.png
Binary file static/style/blue/sprite-history-states.png has changed
diff -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a -r 93215e7e74d020b478803ce2755f76c7e5236f16 static/style/src/less/upload.less
--- a/static/style/src/less/upload.less
+++ b/static/style/src/less/upload.less
@@ -1,87 +1,80 @@
-.galaxy-upload-box
+.upload-box
{
width : 100%;
- height : 200px;
- max-height : 200px;
- padding : 10px 0px 0px 0px;
+ height : 250px;
+ max-height : 250px;
text-align : center;
- cursor : pointer;
overflow : scroll;
font-size : @font-size-base;
- line-height : @line-height-base;
+ line-height : @line-height-large;
-moz-border-radius: @border-radius-large;
- border-radius: @border-radius-large;
+ border-radius : @border-radius-large;
border : 1px dashed @btn-default-border;
+ padding : 20px;
- .corner-soft
+ .panel
{
- -moz-border-radius: @border-radius-base;
- border-radius: @border-radius-base;
- }
+ display: none;
- .shadow
- {
- -webkit-box-shadow: 0 0 2px rgba(0,0,0,0.3);
- }
+ .panel-heading
+ {
+ position: relative;
+ height: 19px;
+ padding: 5px;
- .highlight
- {
- border : 1px dashed @btn-default-color;
- }
+ .title
+ {
+ position: absolute;
+ top: 2px;
+ font-weight: normal;
+ text-align: left;
+ margin: 0px;
+ max-width: 300px;
+ overflow: hidden;
+ }
- .file
- {
- position : relative;
- margin : 5px 20px 5px 20px;
- border : 1px solid @btn-default-border;
- color : @btn-default-color;
+ .info
+ {
+ position: absolute;
+ top: 3px;
+ font-weight: normal;
+ right: 20px;
+ text-align: right;
+ margin: 0px;
+ }
- .title
- {
- margin : 3px 130px 0px 5px;
- text-align : left;
- overflow : hidden;
- border : 0px;
+ .remove
+ {
+ position: absolute;
+ cursor: pointer;
+ top: 0px;
+ right: 3px;
+ }
}
- .progress-frame
+ .panel-body
{
- border : 0px;
- margin : 0px 5px 3px 5px;
- height : 7px;
- background : @btn-default-border;
+ position: relative;
+ padding: 5px;
}
- .progress
+ .panel-footer
{
- background : @bs-success;
- height : 100%;
- width : 0%;
- }
-
- .failed
- {
- background : @bs-danger;
- }
-
- .error
- {
- font-size : @font-size-small;
- text-align : left;
- overflow : hidden;
- margin : 0px 5px 0px 5px;
- }
-
- .info
- {
- position : absolute;
- top : 4px;
- right : 5px;
- font-size : @font-size-small;
- text-align : right;
- overflow : hidden;
- max-width : 100px;
- max-height : 12px;
+ position:relative;
+ height: 20px;
+ padding: 0px;
+
+ .progress
+ {
+ height: 10px;
+ margin: 5px;
+ }
+
+ .error
+ {
+ font-weight: normal;
+ margin: 2px;
+ }
}
}
}
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Saved histories grid: fix to persistent adv. search, use location.pathname for Grid.base_url to prevent mixed-content; pack scripts
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6f77298e8d16/
Changeset: 6f77298e8d16
User: carlfeberhard
Date: 2013-09-20 00:32:00
Summary: Saved histories grid: fix to persistent adv. search, use location.pathname for Grid.base_url to prevent mixed-content; pack scripts
Affected #: 6 files
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -198,7 +198,10 @@
if page_num == 0:
# Show all rows in page.
total_num_rows = query.count()
+ # persistent page='all'
page_num = 1
+ #page_num = 'all'
+ #extra_url_args['page'] = page_num
num_pages = 1
else:
# Show a limited number of rows. Before modifying query, get the total number of rows that query
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -472,7 +472,7 @@
go_to_URL();
return;
}
-
+
// If there's an operation, do POST; otherwise, do GET.
var method = (grid.get('operation') ? "POST" : "GET" );
$('.loading-elt-overlay').show(); // Show overlay to indicate loading and prevent user actions.
@@ -565,4 +565,4 @@
// return
return true;
-}
\ No newline at end of file
+}
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a static/scripts/packed/galaxy.upload.js
--- a/static/scripts/packed/galaxy.upload.js
+++ b/static/scripts/packed/galaxy.upload.js
@@ -1,1 +1,1 @@
-define(["utils/galaxy.css","galaxy.modal","galaxy.master","utils/galaxy.uploadbox","libs/backbone/backbone-relational"],function(c,b,d){var a=Backbone.View.extend({modal:null,button_show:null,file_counter:0,initialize:function(){c.load_file("static/style/galaxy.upload.css");var e=this;this.button_show=new d.GalaxyMasterIcon({icon:"fa-icon-upload",tooltip:"Upload Files",on_click:function(f){e.event_show(f)},with_number:true});Galaxy.master.prepend(this.button_show)},events:{mouseover:"event_mouseover",mouseleave:"event_mouseleave"},event_mouseover:function(f){$("#galaxy-upload-box").addClass("galaxy-upload-highlight")},event_mouseleave:function(f){$("#galaxy-upload-box").removeClass("galaxy-upload-highlight")},event_start:function(e,f,g){var h="#galaxy-upload-file-"+e;$("#galaxy-upload-box").append(this.template_file(h));$("#galaxy-upload-file-"+e).find(".title").html(f.name);this.event_progress(e,f,0);this.file_counter++;this.refresh()},event_progress:function(f,g,i){var h=$("#galaxy-upload-file-"+f);var e=parseInt(i);h.find(".progress").css({width:e+"%"});h.find(".info").html(e+"% of "+this.size_to_string(g.size))},event_success:function(e,f,g){Galaxy.currHistoryPanel.refresh();this.file_counter--;this.refresh()},event_error:function(e,f,h){var g=$("#galaxy-upload-file-"+e);g.find(".progress-frame").addClass("failed");g.find(".error").html("<strong>Failed:</strong> "+h);this.event_progress(e,f,0);this.file_counter--;this.refresh()},event_show:function(h){h.preventDefault();if(!Galaxy.currHistoryPanel){var g=this;window.setTimeout(function(){g.event_show(h)},200);return}if(!this.modal){this.modal=new b.GalaxyModal({title:"Upload files from your local drive",body:this.template()});var f=Galaxy.currHistoryPanel.model.get("id");var g=this;$("#galaxy-upload-box").uploadbox({url:galaxy_config.root+"api/histories/"+f+"/contents",dragover:g.event_mouseover,dragleave:g.event_mouseleave,start:function(e,i,j){g.event_start(e,i,j)},success:function(e,i,j){g.event_success(e,i,j)},progress:function(e,i,j){g.event_progress(e,i,j)},error:function(e,i,j){g.event_error(e,i,j)},data:{source:"upload"}});this.setElement("#galaxy-upload-box")}this.modal.show()},refresh:function(){if(this.file_counter>0){this.button_show.number(this.file_counter)}else{this.button_show.number("")}},size_to_string:function(e){var f="";if(e>=100000000000){e=e/100000000000;f="TB"}else{if(e>=100000000){e=e/100000000;f="GB"}else{if(e>=100000){e=e/100000;f="MB"}else{if(e>=100){e=e/100;f="KB"}else{e=e*10;f="b"}}}}return"<strong>"+(Math.round(e)/10)+"</strong> "+f},template:function(){return'<form id="galaxy-upload-box" class="galaxy-upload-box galaxy-corner"></form>'},template_file:function(e){return'<div id="'+e.substr(1)+'" class="galaxy-upload-file galaxy-corner-soft galaxy-shadow"><div class="title"></div><div class="error"></div><div class="progress-frame galaxy-corner-soft"><div class="progress"></div></div><div class="info"></div></div>'}});return{GalaxyUpload:a}});
\ No newline at end of file
+define(["galaxy.modal","galaxy.master","utils/galaxy.uploadbox","libs/backbone/backbone-relational"],function(b,c){var a=Backbone.View.extend({modal:null,button_show:null,file_counter:0,initialize:function(){var d=this;this.button_show=new c.GalaxyMasterIcon({icon:"fa-icon-upload",tooltip:"Upload Files",on_click:function(f){d.event_show(f)},with_number:true});Galaxy.master.prepend(this.button_show)},events:{mouseover:"event_mouseover",mouseleave:"event_mouseleave"},event_mouseover:function(d){$("#galaxy-upload-box").addClass("highlight")},event_mouseleave:function(d){$("#galaxy-upload-box").removeClass("highlight")},event_start:function(d,e,f){var g="#galaxy-upload-file-"+d;$("#galaxy-upload-box").append(this.template_file(g));$("#galaxy-upload-file-"+d).find(".title").html(e.name);this.event_progress(d,e,0);this.file_counter++;this.refresh()},event_progress:function(e,f,h){var g=$("#galaxy-upload-file-"+e);var d=parseInt(h);g.find(".progress").css({width:d+"%"});g.find(".info").html(d+"% of "+this.size_to_string(f.size))},event_success:function(d,e,f){Galaxy.currHistoryPanel.refresh();this.file_counter--;this.refresh()},event_error:function(d,e,g){var f=$("#galaxy-upload-file-"+d);f.find(".progress-frame").addClass("failed");f.find(".error").html("<strong>Failed:</strong> "+g);this.event_progress(d,e,0);this.file_counter--;this.refresh()},event_show:function(g){g.preventDefault();if(!Galaxy.currHistoryPanel){var f=this;window.setTimeout(function(){f.event_show(g)},200);return}if(!this.modal){var f=this;this.modal=new b.GalaxyModal({title:"Upload files from your local drive",body:this.template(),buttons:{Close:function(){f.modal.hide()}}});var d=Galaxy.currHistoryPanel.model.get("id");var f=this;$("#galaxy-upload-box").uploadbox({url:galaxy_config.root+"api/histories/"+d+"/contents",dragover:f.event_mouseover,dragleave:f.event_mouseleave,start:function(e,h,i){f.event_start(e,h,i)},success:function(e,h,i){f.event_success(e,h,i)},progress:function(e,h,i){f.event_progress(e,h,i)},error:function(e,h,i){f.event_error(e,h,i)},data:{source:"upload"}});this.setElement("#galaxy-upload-box")}this.modal.show()},refresh:function(){if(this.file_counter>0){this.button_show.number(this.file_counter)}else{this.button_show.number("")}},size_to_string:function(d){var e="";if(d>=100000000000){d=d/100000000000;e="TB"}else{if(d>=100000000){d=d/100000000;e="GB"}else{if(d>=100000){d=d/100000;e="MB"}else{if(d>=100){d=d/100;e="KB"}else{d=d*10;e="b"}}}}return"<strong>"+(Math.round(d)/10)+"</strong> "+e},template:function(){return'<form id="galaxy-upload-box" class="galaxy-upload-box"></form>'},template_file:function(d){return'<div id="'+d.substr(1)+'" class="file corner-soft shadow"><div class="title"></div><div class="error"></div><div class="progress-frame corner-soft"><div class="progress"></div></div><div class="info"></div></div>'}});return{GalaxyUpload:a}});
\ No newline at end of file
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(b,c,a){Terminal.call(this,b);this.datatypes=c;this.multiple=a}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1||this.multiple){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;if(this.handle1){this.handle1.connect(this)}this.handle2=a;if(this.handle2){this.handle2.connect(this)}},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};if(!this.handle1||!this.handle2){return}var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(f,b,c,a){var d=this;$(f).each(function(){var g=this.terminal=new InputTerminal(this,c,a);g.node=d;g.name=b;$(this).bind("dropinit",function(h,i){return $(i.drag).hasClass("output-terminal")&&g.can_accept(i.drag.terminal)}).bind("dropstart",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#BBFFBB"}}).bind("dropend",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#FFFFFF"}}).bind("drop",function(h,i){(new Connector(i.drag.terminal,g)).redraw()}).bind("hover",function(){if(g.connectors.length>0){var h=$("<div 
class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img/>").attr("src",galaxy_config.root+"static/images/delete_icon.png").click(function(){$.each(g.connectors,function(j,i){if(i){i.destroy()}});h.remove()}))).bind("mouseleave",function(){$(this).remove()});h.css({top:$(this).offset().top-2,left:$(this).offset().left-h.width(),"padding-right":$(this).width()}).show()}});d.input_terminals[b]=g})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions,f.multiple);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions,j.multiple);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > .terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var b=false;var 
a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var g={};$.each(f.input_terminals,function(i,j){g[j.name]=null;var h=[];$.each(j.connectors,function(k,l){h[k]={id:l.handle1.node.id,output_name:l.handle1.name};g[j.name]=h})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){if($.isArray(h)){$.each(h,function(m,k){var n=wf.nodes[k.id];var o=new Connector();o.connect(n.output_terminals[k.output_name],d.input_terminals[i]);o.redraw()})}else{var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node 
selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_config.root+"static/images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img/>").attr("src",galaxy_config.root+"static/images/delete_icon.png").click(function(b){g.destroy()}).hover(function(){$(this).attr("src",galaxy_config.root+"static/images/delete_icon_dark.png")},function(){$(this).attr("src",galaxy_config.root+"static/images/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var 
q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var 
b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(b,c,a){Terminal.call(this,b);this.datatypes=c;this.multiple=a}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1||this.multiple){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;if(this.handle1){this.handle1.connect(this)}this.handle2=a;if(this.handle2){this.handle2.connect(this)}},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};if(!this.handle1||!this.handle2){return}var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(f,b,c,a){var d=this;$(f).each(function(){var g=this.terminal=new InputTerminal(this,c,a);g.node=d;g.name=b;$(this).bind("dropinit",function(h,i){return $(i.drag).hasClass("output-terminal")&&g.can_accept(i.drag.terminal)}).bind("dropstart",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#BBFFBB"}}).bind("dropend",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#FFFFFF"}}).bind("drop",function(h,i){(new Connector(i.drag.terminal,g)).redraw()}).bind("hover",function(){if(g.connectors.length>0){var h=$("<div 
class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='button'></div>").append($("<div/>").addClass("fa-icon-button fa-icon-remove").click(function(){$.each(g.connectors,function(j,i){if(i){i.destroy()}});h.remove()}))).bind("mouseleave",function(){$(this).remove()});h.css({top:$(this).offset().top-2,left:$(this).offset().left-h.width(),"padding-right":$(this).width()}).show()}});d.input_terminals[b]=g})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions,f.multiple);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions,j.multiple);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > .terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var b=false;var 
a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var g={};$.each(f.input_terminals,function(i,j){g[j.name]=null;var h=[];$.each(j.connectors,function(k,l){h[k]={id:l.handle1.node.id,output_name:l.handle1.name};g[j.name]=h})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){if($.isArray(h)){$.each(h,function(m,k){var n=wf.nodes[k.id];var o=new Connector();o.connect(n.output_terminals[k.output_name],d.input_terminals[i]);o.redraw()})}else{var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_config.root+"static/images/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node 
selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_config.root+"static/images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<div>").addClass("fa-icon-button fa-icon-remove").click(function(b){g.destroy()}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var 
q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var 
b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -84,10 +84,9 @@
/** Returns true if string denotes true. */
var is_true = function(s) { return _.indexOf(['True', 'true', 't'], s) !== -1; };
-
// Create grid.
var grid = new Grid({
- url_base: '${trans.request.path_url}',
+ url_base: location.pathname,
async: is_true('${grid.use_async}'),
async_ops: async_ops,
categorical_filters: categorical_filters,
@@ -95,6 +94,8 @@
sort_key: '${sort_key}',
show_item_checkboxes: is_true('${context.get('show_item_checkboxes', False)}'),
cur_page: ${cur_page_num},
+ // persistant page="all"
+ //cur_page: ('${cur_page_num}' === 'all')?('all'):(Number('${cur_page_num}')),
num_pages: ${num_pages}
});
diff -r 5cbf058d648db06541540d11e6dd1d1535a12292 -r 6f77298e8d1628fb55d8ba72a69ab62b7864bb7a templates/grid_common.mako
--- a/templates/grid_common.mako
+++ b/templates/grid_common.mako
@@ -102,8 +102,26 @@
## Print grid search/filtering UI.
<%def name="render_grid_filters( grid, render_advanced_search=True )">
+ <%
+ # Show advanced search if flag set or if there are filters for advanced search fields.
+ advanced_search_display = "none"
+ if 'advanced-search' in kwargs and kwargs['advanced-search'] in ['True', 'true']:
+ advanced_search_display = "block"
+
+ for column in grid.columns:
+ if column.filterable == "advanced":
+ ## Show div if current filter has value that is different from the default filter.
+ if column.key in cur_filter_dict and column.key in default_filter_dict and \
+ cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ advanced_search_display = "block"
+
+ # do not show standard search if showing adv.
+ standard_search_display = "block"
+ if advanced_search_display == "block":
+ standard_search_display = "none"
+ %>
## Standard search.
- <div id="standard-search">
+ <div id="standard-search" style="display: ${standard_search_display};"><table><tr><td style="padding: 0;"><table>
@@ -139,19 +157,6 @@
</div>
## Advanced search.
- <%
- # Show advanced search if flag set or if there are filters for advanced search fields.
- advanced_search_display = "none"
- if 'advanced-search' in kwargs and kwargs['advanced-search'] in ['True', 'true']:
- advanced_search_display = "block"
-
- for column in grid.columns:
- if column.filterable == "advanced":
- ## Show div if current filter has value that is different from the default filter.
- if column.key in cur_filter_dict and column.key in default_filter_dict and \
- cur_filter_dict[column.key] != default_filter_dict[column.key]:
- advanced_search_display = "block"
- %><div id="advanced-search" style="display: ${advanced_search_display}; margin-top: 5px; border: 1px solid #ccc;"><table><tr><td style="text-align: left" colspan="100">
@@ -170,7 +175,7 @@
%if column.key in cur_filter_dict and column.key in default_filter_dict and \
cur_filter_dict[column.key] != default_filter_dict[column.key]:
<script type="text/javascript">
- $('#advanced-search').css("display", "none");
+ $('#advanced-search').css("display", "block");
</script>
%endif
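
The new mako block above decides which of the two search panels to show: advanced search wins when the 'advanced-search' flag is set or when any advanced-filterable column carries a non-default filter, and the standard panel is hidden whenever the advanced one is shown. Restated as a small stand-alone Python sketch (not part of the changeset; the function name and the plain-dict arguments are illustrative):

def search_panel_display( kwargs, grid, cur_filter_dict, default_filter_dict ):
    # advanced search is shown if explicitly requested...
    advanced = 'block' if kwargs.get( 'advanced-search' ) in ( 'True', 'true' ) else 'none'
    # ...or if any advanced-filterable column has a filter differing from its default
    for column in grid.columns:
        if ( column.filterable == 'advanced'
             and column.key in cur_filter_dict
             and column.key in default_filter_dict
             and cur_filter_dict[ column.key ] != default_filter_dict[ column.key ] ):
            advanced = 'block'
    # exactly one of the two panels is visible at a time
    standard = 'none' if advanced == 'block' else 'block'
    return standard, advanced
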
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: QA: fix to deleting current history in test_get_data
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5cbf058d648d/
Changeset: 5cbf058d648d
User: carlfeberhard
Date: 2013-09-20 00:28:24
Summary: QA: fix to deleting current history in test_get_data
Affected #: 1 file
diff -r cb95dad5175ec10cf191bddcaf2e1d115546dd7c -r 5cbf058d648db06541540d11e6dd1d1535a12292 test/functional/test_get_data.py
--- a/test/functional/test_get_data.py
+++ b/test/functional/test_get_data.py
@@ -22,7 +22,7 @@
"""
# in order to remove a lot of boiler plate - and not have cascading errors
history = get_latest_history_for_user( user )
- self.delete_history( id=self.security.encode_id( history.id ) )
+ self.delete_current_history()
self.is_history_empty()
return get_latest_history_for_user( user )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: QA, browser tests: dump stdout if casper didn't return good JSON
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cb95dad5175e/
Changeset: cb95dad5175e
User: carlfeberhard
Date: 2013-09-19 21:56:29
Summary: QA, browser tests: dump stdout if casper didn't return good JSON
Affected #: 1 file
diff -r 3a15758ba67e2429698184a4300c9608c65ce06d -r cb95dad5175ec10cf191bddcaf2e1d115546dd7c test/casperjs/casperjs_runner.py
--- a/test/casperjs/casperjs_runner.py
+++ b/test/casperjs/casperjs_runner.py
@@ -176,7 +176,14 @@
err_string = ( "%s\n%s" %( get_msg( last_error ),
self.browser_backtrace_to_string( get_trace( last_error ) ) ) )
- # if we couldn't parse json from what's returned on the error, raise a vanilla exc
+ # if we couldn't parse json from what's returned on the error, dump stdout
+ except ValueError, val_err:
+ if str( val_err ) == 'No JSON object could be decoded':
+ log.debug( '(error parsing returned JSON from casperjs, dumping stdout...)\n:%s', stdout_output )
+ else:
+ raise
+
+ # otherwise, raise a vanilla exc
except Exception, exc:
log.debug( '(failed to parse error returned from %s: %s)', _PATH_TO_HEADLESS, str( exc ) )
return HeadlessJSJavascriptError(
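
The hunk above changes the error path so that, when the headless browser's output is not valid JSON, the runner logs the raw stdout instead of surfacing only a bare decoding error. The same fallback pattern in isolation, as a hedged sketch using nothing but the standard library (function and variable names are illustrative, not the runner's real API):

import json
import logging

log = logging.getLogger( __name__ )

def parse_headless_output( stdout_output ):
    # json raises ValueError on undecodable input (JSONDecodeError subclasses it
    # on newer Pythons), which is the case the changeset special-cases
    try:
        return json.loads( stdout_output )
    except ValueError:
        log.debug( '(error parsing returned JSON, dumping stdout...)\n%s', stdout_output )
        return None
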
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: Visualizations registry, QA: initial unit tests; Plugin framework unit tests: remove unneeded tests
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3a15758ba67e/
Changeset: 3a15758ba67e
User: carlfeberhard
Date: 2013-09-19 16:09:19
Summary: Visualizations registry, QA: initial unit tests; Plugin framework unit tests: remove unneeded tests
Affected #: 6 files
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d lib/galaxy/visualization/registry.py
--- a/lib/galaxy/visualization/registry.py
+++ b/lib/galaxy/visualization/registry.py
@@ -82,8 +82,9 @@
:rtype: bool
:returns: True if the path contains a plugin
"""
- # plugin_path must be a directory, have a config dir
+ # plugin_path must be a directory, have a config dir, and a config file matching the plugin dir name
if not os.path.isdir( plugin_path ):
+ # super won't work here - different criteria
return False
if not 'config' in os.listdir( plugin_path ):
return False
@@ -112,6 +113,7 @@
# config file is required, otherwise skip this visualization
plugin[ 'config_file' ] = os.path.join( plugin_path, 'config', ( plugin.name + '.xml' ) )
config = self.config_parser.parse_file( plugin.config_file )
+
if not config:
return None
plugin[ 'config' ] = config
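
The comment added to is_plugin spells out the acceptance criteria: the candidate path must be a directory, it must contain a 'config' subdirectory, and that subdirectory must hold an XML config file named after the plugin directory (e.g. scatterplot/config/scatterplot.xml). A minimal stand-alone sketch of that rule, assuming only the standard library (this illustrates the criteria described above, not the registry's actual method body):

import os

def looks_like_visualization_plugin( plugin_path ):
    # must be a directory
    if not os.path.isdir( plugin_path ):
        return False
    # must contain a 'config' subdirectory
    config_dir = os.path.join( plugin_path, 'config' )
    if not os.path.isdir( config_dir ):
        return False
    # the config dir must hold <plugin_dir_name>.xml
    plugin_name = os.path.basename( plugin_path.rstrip( os.sep ) )
    return os.path.isfile( os.path.join( config_dir, plugin_name + '.xml' ) )
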
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d test/unit/visualizations/registry/test_VisualizationsRegistry.py
--- /dev/null
+++ b/test/unit/visualizations/registry/test_VisualizationsRegistry.py
@@ -0,0 +1,156 @@
+"""
+"""
+import os
+import imp
+import unittest
+
+utility = imp.load_source( 'utility', '../../util/utility.py' )
+log = utility.set_up_filelogger( __name__ + '.log' )
+
+relative_test_path = '/test/unit/visualizations/registry'
+utility.add_galaxy_lib_to_path( relative_test_path )
+
+from galaxy.visualization.registry import VisualizationsRegistry
+
+base_mock = imp.load_source( 'mock', '../../web/base/mock.py' )
+
+# ----------------------------------------------------------------------------- globals
+glx_dir = os.getcwd().replace( relative_test_path, '' )
+template_cache_dir = os.path.join( glx_dir, 'database', 'compiled_templates' )
+vis_reg_path = 'config/plugins/visualizations'
+
+config1 = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="scatterplot">
+ <data_sources>
+ <data_source>
+ <model_class>HistoryDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ </data_source>
+ </data_sources>
+ <params>
+ <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+ </params>
+ <template>scatterplot.mako</template>
+</visualization>
+"""
+
+# -----------------------------------------------------------------------------
+class VisualizationsRegistry_TestCase( unittest.TestCase ):
+
+ # ------------------------------------------------------------------------- vis plugin discovery
+ def test_plugin_load_from_repo( self ):
+ """should attempt load if criteria met"""
+ mock_app = base_mock.MockApp( glx_dir )
+ plugin_mgr = VisualizationsRegistry( mock_app,
+ directories_setting=vis_reg_path,
+ template_cache_dir=template_cache_dir )
+
+ expected_plugins_path = os.path.join( glx_dir, vis_reg_path )
+ expected_plugin_names = [
+ 'circster',
+ 'graphview',
+ 'phyloviz',
+ 'scatterplot',
+ 'sweepster',
+ 'trackster',
+ ]
+
+ self.assertEqual( plugin_mgr.base_url, 'visualizations' )
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), expected_plugin_names )
+
+ scatterplot = plugin_mgr.plugins[ 'scatterplot' ]
+ self.assertEqual( scatterplot.name, 'scatterplot' )
+ self.assertEqual( scatterplot.path, os.path.join( expected_plugins_path, 'scatterplot' ) )
+ self.assertEqual( scatterplot.base_url, '/'.join([ plugin_mgr.base_url, scatterplot.name ]) )
+ self.assertTrue( scatterplot.serves_static )
+ self.assertEqual( scatterplot.static_path, os.path.join( scatterplot.path, 'static' ) )
+ self.assertEqual( scatterplot.static_url, '/'.join([ scatterplot.base_url, 'static' ]) )
+ self.assertTrue( scatterplot.serves_templates )
+ self.assertEqual( scatterplot.template_path, os.path.join( scatterplot.path, 'templates' ) )
+ self.assertEqual( scatterplot.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+ trackster = plugin_mgr.plugins[ 'trackster' ]
+ self.assertEqual( trackster.name, 'trackster' )
+ self.assertEqual( trackster.path, os.path.join( expected_plugins_path, 'trackster' ) )
+ self.assertEqual( trackster.base_url, '/'.join([ plugin_mgr.base_url, trackster.name ]) )
+ self.assertFalse( trackster.serves_static )
+ self.assertFalse( trackster.serves_templates )
+
+ def test_plugin_load( self ):
+ """"""
+ mock_app_dir = base_mock.MockDir({
+ 'plugins' : {
+ 'vis1' : {
+ 'config' : {
+ 'vis1.xml' : config1
+ },
+ 'static' : {},
+ 'templates' : {},
+ },
+ 'vis2' : {
+ 'config' : {
+ 'vis2.xml' : config1
+ }
+ },
+ 'not_a_vis1' : {
+ 'config' : {
+ 'vis1.xml' : 'blerbler'
+ },
+ },
+ 'not_a_vis1' : {
+ # no config
+ 'static' : {},
+ 'templates' : {},
+ },
+ # empty
+ 'not_a_vis2' : {},
+ 'not_a_vis3' : 'blerbler',
+ # bad config
+ 'not_a_vis4' : {
+ 'config' : {
+ 'not_a_vis4.xml' : 'blerbler'
+ }
+ },
+ }
+ })
+ mock_app = base_mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = VisualizationsRegistry( mock_app,
+ directories_setting='plugins',
+ template_cache_dir='bler' )
+
+ expected_plugins_path = os.path.join( mock_app_dir.root_path, 'plugins' )
+ expected_plugin_names = [ 'vis1', 'vis2' ]
+
+ self.assertEqual( plugin_mgr.base_url, 'visualizations' )
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), expected_plugin_names )
+
+ vis1 = plugin_mgr.plugins[ 'vis1' ]
+ self.assertEqual( vis1.name, 'vis1' )
+ self.assertEqual( vis1.path, os.path.join( expected_plugins_path, 'vis1' ) )
+ self.assertEqual( vis1.base_url, '/'.join([ plugin_mgr.base_url, vis1.name ]) )
+ self.assertTrue( vis1.serves_static )
+ self.assertEqual( vis1.static_path, os.path.join( vis1.path, 'static' ) )
+ self.assertEqual( vis1.static_url, '/'.join([ vis1.base_url, 'static' ]) )
+ self.assertTrue( vis1.serves_templates )
+ self.assertEqual( vis1.template_path, os.path.join( vis1.path, 'templates' ) )
+ self.assertEqual( vis1.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+ vis2 = plugin_mgr.plugins[ 'vis2' ]
+ self.assertEqual( vis2.name, 'vis2' )
+ self.assertEqual( vis2.path, os.path.join( expected_plugins_path, 'vis2' ) )
+ self.assertEqual( vis2.base_url, '/'.join([ plugin_mgr.base_url, vis2.name ]) )
+ self.assertFalse( vis2.serves_static )
+ self.assertFalse( vis2.serves_templates )
+
+ mock_app_dir.remove()
+
+
+#TODO: config parser tests (in separate file)
+
+if __name__ == '__main__':
+ unittest.main()
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d test/unit/visualizations/registry/test_plugins.py
--- a/test/unit/visualizations/registry/test_plugins.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from os import makedirs
-from os.path import join, dirname, basename
-from tempfile import mkdtemp
-from shutil import rmtree
-
-from galaxy.visualization.registry import VisualizationsRegistry
-
-
-def test_visualization_loading():
- visualizations = __default_viz_root()
- registry = VisualizationsRegistry(visualizations, 'foo')
- assert "VisualizationsRegistry" in str(registry)
- assert visualizations in str(registry)
- assert registry.name == "visualizations"
- __assert_scatterplot_registered(registry)
- assert registry._get_template_paths() == [visualizations]
-
-
-def test_multiple_visualization_roots():
- temp_dir = mkdtemp()
- try:
- makedirs(join(temp_dir, "coolplugin5"))
- visualization_dirs = "%s,%s" % (__default_viz_root(), temp_dir)
- registry = VisualizationsRegistry(visualization_dirs, 'foo')
- __assert_scatterplot_registered(registry)
- assert "coolplugin5" in \
- [basename(path) for path in registry.get_plugin_directories()]
- assert registry._get_template_paths() == \
- [__default_viz_root(), temp_dir]
- finally:
- rmtree(temp_dir)
-
-
-def __assert_scatterplot_registered(registry):
- assert 'scatterplot' in \
- [basename(path) for path in registry.get_plugin_directories()]
-
-
-def __default_viz_root():
- galaxy_root = join(dirname(__file__), '..', '..')
- visualizations = join(galaxy_root, 'config', 'plugins', 'visualizations')
- return visualizations
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d test/unit/web/base/test_HookPluginManager.py
--- a/test/unit/web/base/test_HookPluginManager.py
+++ b/test/unit/web/base/test_HookPluginManager.py
@@ -88,7 +88,6 @@
self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
- self.assertFalse( plugin_mgr.plugins.get( 'not_a_plugin', False ) )
plugin = plugin_mgr.plugins[ 'plugin1' ]
self.assertEqual( plugin.name, 'plugin1' )
@@ -249,6 +248,5 @@
mock_app_dir.remove()
-
if __name__ == '__main__':
unittest.main()
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d test/unit/web/base/test_PageServingPluginManager.py
--- a/test/unit/web/base/test_PageServingPluginManager.py
+++ b/test/unit/web/base/test_PageServingPluginManager.py
@@ -45,7 +45,6 @@
self.assertEqual( plugin_mgr.base_url, 'test' )
self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3' ] )
- self.assertFalse( plugin_mgr.plugins.get( 'not_a_plugin', False ) )
plugin1 = plugin_mgr.plugins[ 'plugin1' ]
self.assertEqual( plugin1.name, 'plugin1' )
diff -r 285453d3908440a10c8863fe90f9833df5a5e261 -r 3a15758ba67e2429698184a4300c9608c65ce06d test/unit/web/base/test_PluginManager.py
--- a/test/unit/web/base/test_PluginManager.py
+++ b/test/unit/web/base/test_PluginManager.py
@@ -36,7 +36,6 @@
self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
- self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
mock_app_dir.remove()
@@ -60,7 +59,6 @@
self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
- self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
def test_multiple_dirs( self ):
"""should search in multiple directories"""
@@ -94,8 +92,6 @@
self.assertEqual( plugin_mgr.plugins[ 'plugin3' ].path, os.path.join( expected_plugins_abs_path, 'plugin3' ) )
self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].name, 'plugin4' )
self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].path, os.path.join( expected_plugins_abs_path, 'plugin4' ) )
- self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
- self.assertFalse( plugin_mgr.plugins.get( 'file2', False ) )
if __name__ == '__main__':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Include hg19 in ucsc_build_sites.txt
by commits-noreply@bitbucket.org 19 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/285453d39084/
Changeset: 285453d39084
User: dannon
Date: 2013-09-19 15:59:49
Summary: Include hg19 in ucsc_build_sites.txt
Affected #: 1 file
diff -r 6eb358cc82c15782abc2c4e912f935830dfc2ece -r 285453d3908440a10c8863fe90f9833df5a5e261 tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -1,5 +1,5 @@
#Harvested from http://genome.ucsc.edu/cgi-bin/das/dsn
-main http://genome.ucsc.edu/cgi-bin/hgTracks? priPac1,danRer4,mm9,mm8,droAna1,mm5,caeRem2,mm7,mm6,panTro1,dm3,panTro2,anoCar1,ce4,galGal3,galGal2,ce1,rn3,rn2,droMoj1,droMoj2,rn4,droYak1,droYak2,dp3,dp2,dm1,canFam1,danRer5,canFam2,danRer3,danRer2,ornAna1,ci2,ci1,tetNig1,bosTau1,bosTau3,bosTau2,equCab1,oryLat1,droAna2,droEre1,ponAbe2,rheMac2,sacCer1,droPer1,droSim1,monDom1,cb1,dm2,droSec1,strPur1,droVir2,droVir1,strPur2,sc1,xenTro1,droGri1,xenTro2,cb3,gasAcu1,caePb1,anoGam1,fr2,fr1,hg15,hg16,hg17,hg18,felCat3,apiMel2,monDom4,apiMel1,ce2
+main http://genome.ucsc.edu/cgi-bin/hgTracks? priPac1,danRer4,mm9,mm8,droAna1,mm5,caeRem2,mm7,mm6,panTro1,dm3,panTro2,anoCar1,ce4,galGal3,galGal2,ce1,rn3,rn2,droMoj1,droMoj2,rn4,droYak1,droYak2,dp3,dp2,dm1,canFam1,danRer5,canFam2,danRer3,danRer2,ornAna1,ci2,ci1,tetNig1,bosTau1,bosTau3,bosTau2,equCab1,oryLat1,droAna2,droEre1,ponAbe2,rheMac2,sacCer1,droPer1,droSim1,monDom1,cb1,dm2,droSec1,strPur1,droVir2,droVir1,strPur2,sc1,xenTro1,droGri1,xenTro2,cb3,gasAcu1,caePb1,anoGam1,fr2,fr1,hg15,hg16,hg17,hg18,hg19,felCat3,apiMel2,monDom4,apiMel1,ce2
#Harvested from http://archaea.ucsc.edu/cgi-bin/das/dsn
archaea http://archaea.ucsc.edu/cgi-bin/hgTracks? alkaEhrl_MLHE_1,shewW318,idioLoih_L2TR,sulSol1,erwiCaro_ATROSEPTICA,symbTher_IAM14863,moorTher_ATCC39073,therFusc_YX,methHung1,bradJapo,therElon,shewPutrCN32,pediPent_ATCC25745,mariMari_MCS10,nanEqu1,baciSubt,chlaTrac,magnMagn_AMB_1,chroViol,ralsSola,acidCryp_JF_5,erytLito_HTCC2594,desuVulg_HILDENBOROUG,pyrAer1,sulfToko1,shewANA3,paraSp_UWE25,geobKaus_HTA426,rhizEtli_CFN_42,uncuMeth_RCI,candBloc_FLORIDANUS,deinRadi,yersPest_CO92,saccEryt_NRRL_2338,rhodRHA1,candCars_RUDDII,burkMall_ATCC23344,eschColi_O157H7,burk383,psycIngr_37,rhodSpha_2_4_1,wolbEndo_OF_DROSOPHIL,burkViet_G4,propAcne_KPA171202,enteFaec_V583,campJeju_81_176,acidJS42,heliPylo_26695,pseuHalo_TAC125,chroSale_DSM3043,methVann1,archFulg1,neisMeni_Z2491_1,fusoNucl,vermEise_EF01_2,anabVari_ATCC29413,tropWhip_TW08_27,heliHepa,acinSp_ADP1,anapMarg_ST_MARIES,natrPhar1,haheChej_KCTC_2396,therPetr_RKU_1,neisGono_FA1090_1,colwPsyc_34H,desuPsyc_LSV54,hyphNept_ATCC15444,vibrChol1,deinGeot_DSM11300,strePyog_M1_GAS,franCcI3,salmTyph,metaSedu,lactSali_UCC118,trepPall,neisMeni_MC58_1,syntWolf_GOETTINGEN,flavJohn_UW101,methBoon1,haemSomn_129PT,shewLoihPV4,igniHosp1,haemInfl_KW20,haloHalo_SL1,ferrAcid1,sphiAlas_RB2256,candPela_UBIQUE_HTCC1,caldSacc_DSM8903,aerPer1,lactPlan,carbHydr_Z_2901,therTher_HB8,vibrVuln_YJ016_1,rhodPalu_CGA009,acidCell_11B,siliPome_DSS_3,therVolc1,haloWals1,rubrXyla_DSM9941,shewAmaz,nocaJS61,vibrVuln_CMCP6_1,sinoMeli,ureaUrea,baciHalo,bartHens_HOUSTON_1,nitrWino_NB_255,hypeButy1,methBurt2,polaJS66,mesoLoti,methMari_C7,caulCres,neisMeni_FAM18_1,acidBact_ELLIN345,caldMaqu1,salmEnte_PARATYPI_ATC,glucOxyd_621H,cytoHutc_ATCC33406,nitrEuro,therMari,coxiBurn,woliSucc,heliPylo_HPAG1,mesoFlor_L1,pyrHor1,methAeol1,procMari_CCMP1375,pyroArse1,oenoOeni_PSU_1,alcaBork_SK2,wiggBrev,actiPleu_L20,lactLact,methJann1,paraDeni_PD1222,borrBurg,pyroIsla1,orieTsut_BORYONG,shewMR4,methKand1,methCaps_BATH,onioYell_PHYTOPLASMA,bordBron,cenaSymb1,burkCeno_HI2424,franTula_TULARENSIS,pyrFur2,mariAqua_VT8,heliPylo_J99,psycArct_273_4,vibrChol_MO10_1,vibrPara1,rickBell_RML369_C,metAce1,buchSp,ehrlRumi_WELGEVONDEN,methLabrZ_1,chlaPneu_CWL029,thioCrun_XCL_2,pyroCali1,chloTepi_TLS,stapAure_MU50,novoArom_DSM12444,magnMC1,zymoMobi_ZM4,salmTyph_TY2,chloChlo_CAD3,azoaSp_EBN1,therTher_HB27,bifiLong,picrTorr1,listInno,bdelBact,gramFors_KT0803,sulfAcid1,geobTher_NG80_2,peloCarb,ralsEutr_JMP134,mannSucc_MBEL55E,syneSp_WH8102,methTherPT1,clavMich_NCPPB_382,therAcid1,syntAcid_SB,porpGing_W83,therNeut0,leifXyli_XYLI_CTCB0,shewFrig,photProf_SS9,thioDeni_ATCC25259,methMaze1,desuRedu_MI_1,burkThai_E264,campFetu_82_40,blocFlor,jannCCS1,nitrMult_ATCC25196,streCoel,soliUsit_ELLIN6076,pastMult,saliRube_DSM13855,methTher1,nostSp,shigFlex_2A,saccDegr_2_40,oceaIhey,dehaEthe_195,rhodRubr_ATCC11170,arthFB24,shewMR7,pireSp,anaeDeha_2CP_C,haloVolc1,dichNodo_VCS1703A,tricEryt_IMS101,mycoGeni,thioDeni_ATCC33889,methSmit1,geobUran_RF4,shewDeni,halMar1,desuHafn_Y51,methStad1,granBeth_CGDNIH1,therPend1,legiPneu_PHILADELPHIA,vibrChol_O395_1,nitrOcea_ATCC19707,campJeju_RM1221,methPetr_PM1,heliAcin_SHEEBA,eschColi_APEC_O1,peloTher_SI,haloHalo1,syntFuma_MPOB,xyleFast,gloeViol,leucMese_ATCC8293,bactThet_VPI_5482,xantCamp,sodaGlos_MORSITANS,geobSulf,roseDeni_OCH_114,coryEffi_YS_314,brucMeli,mycoTube_H37RV,vibrFisc_ES114_1,pyrAby1,burkXeno_LB400,polyQLWP,stapMari1,peloLute_DSM273,burkCeno_AU_1054,shewBalt,nocaFarc_IFM10152,ente638,mculMari1,saliTrop_CNB_440,neorSenn_MIYAYAMA,aquiAeol,dechArom_RCB,myxoXant_DK_1622,burkPseu_1106A,burkCepa_AMMD,m
ethMari_C5_1,azorCaul2,methFlag_KT,leptInte,eschColi_K12,synePCC6,baumCica_HOMALODISCA,methBark1,pseuAeru,geobMeta_GS15,eschColi_CFT073,photLumi,metMar1,hermArse,campJeju,therKoda1,aeroHydr_ATCC7966,baciAnth_AMES,shewOnei,therTeng,lawsIntr_PHE_MN1_00
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: Plugin frameworks, QA: add unit tests for PluginManager, HookPluginManager, and PageServingPluginManager
by commits-noreply@bitbucket.org 18 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6eb358cc82c1/
Changeset: 6eb358cc82c1
User: carlfeberhard
Date: 2013-09-19 00:24:00
Summary: Plugin frameworks, QA: add unit tests for PluginManager, HookPluginManager, and PageServingPluginManager
Affected #: 12 files
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -280,28 +280,65 @@
Search all plugins for a function named ``hook_fn_prefix`` + ``hook_name``
and run it passing in args and kwargs.
+ Return values from each hook are returned in a dictionary keyed with the
+ plugin names.
+
:type hook_name: string
:param hook_name: name (suffix) of the hook to run
- :rtype: 2-tuple containing (list, dict)
- :returns: (possibly modified) args, kwargs
+ :rtype: dictionary
+ :returns: where keys are plugin.names and
+ values return values from the hooks
"""
#TODO: is hook prefix necessary?
#TODO: could be made more efficient if cached by hook_name in the manager on load_plugin
# (low maint. overhead since no dynamic loading/unloading of plugins)
hook_fn_name = ''.join([ self.hook_fn_prefix, hook_name ])
+ returned = {}
+ for plugin_name, plugin in self.plugins.items():
+ hook_fn = getattr( plugin.module, hook_fn_name, None )
+
+ if hook_fn and hasattr( hook_fn, '__call__' ):
+ try:
+ #log.debug( 'calling %s from %s(%s)', hook_fn.func_name, plugin.name, plugin.module )
+ fn_returned = hook_fn( *args, **kwargs )
+ returned[ plugin.name ] = fn_returned
+ except Exception, exc:
+ # fail gracefully and continue with other plugins
+ log.exception( 'Hook function "%s" failed for plugin "%s"', hook_name, plugin.name )
+
+ # not sure of utility of this - seems better to be fire-and-forget pub-sub
+ return returned
+
+ def filter_hook( self, hook_name, hook_arg, *args, **kwargs ):
+ """
+ Search all plugins for a function named ``hook_fn_prefix`` + ``hook_name``
+ and run the first with ``hook_arg`` and every function after with the
+ return value of the previous.
+
+ ..note:
+ This makes plugin load order very important.
+
+ :type hook_name: string
+ :param hook_name: name (suffix) of the hook to run
+ :type hook_arg: any
+ :param hook_arg: the arg to be passed between hook functions
+ :rtype: any
+ :returns: the modified hook_arg
+ """
+ hook_fn_name = ''.join([ self.hook_fn_prefix, hook_name ])
for plugin_name, plugin in self.plugins.items():
hook_fn = getattr( plugin.module, hook_fn_name, None )
if hook_fn and hasattr( hook_fn, '__call__' ):
try:
- #log.debug( 'calling %s from %s(%s)', hook_fn.func_name, plugin.name, plugin.module )
- hook_fn( *args, **kwargs )
+ hook_arg = hook_fn( hook_arg, *args, **kwargs )
+
except Exception, exc:
# fail gracefully and continue with other plugins
- log.exception( 'Hook function "%s" failed for plugin "%s"', hook_name, plugin.name )
+ log.exception( 'Filter hook function "%s" failed for plugin "%s"', hook_name, plugin.name )
- # may have been altered by hook fns, return in order to act like filter
- return args, kwargs
+ # may have been altered by hook fns, return
+ return hook_arg
# ============================================================================= exceptions
@@ -370,12 +407,12 @@
:rtype: bool
:returns: True if the path contains a plugin
"""
- if not os.path.isdir( plugin_path ):
+ if not super( PageServingPluginManager, self ).is_plugin( plugin_path ):
return False
- #TODO: this is not reliable and forces the inclusion of empty dirs in some situations
- if self.serves_templates and not 'templates' in os.listdir( plugin_path ):
- return False
- if self.serves_static and not 'static' in os.listdir( plugin_path ):
+ # reject only if we don't have either
+ listdir = os.listdir( plugin_path )
+ if( ( 'templates' not in listdir )
+ and ( 'static' not in listdir ) ):
return False
return True
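
The docstring changes above describe two distinct hook-dispatch styles: run_hook() fans a call out to every plugin's hook_<name> function and collects the return values in a dictionary keyed by plugin name, while the new filter_hook() threads a single hook_arg through each plugin's hook in turn, so load order matters. A self-contained sketch of those two styles (an illustration only, not the Galaxy manager: the configurable hook prefix, logging, and per-plugin error handling are omitted, and the toy plugin classes are invented for the demo):

def run_hook( plugins, hook_name, *args, **kwargs ):
    # call hook_<hook_name> on every plugin that defines it; key results by plugin name
    returned = {}
    for plugin_name, plugin in plugins.items():
        hook_fn = getattr( plugin, 'hook_' + hook_name, None )
        if callable( hook_fn ):
            returned[ plugin_name ] = hook_fn( *args, **kwargs )
    return returned

def filter_hook( plugins, hook_name, hook_arg, *args, **kwargs ):
    # pass hook_arg to the first plugin's hook, its return value to the next, and so on
    # (iteration order stands in for plugin load order here)
    for plugin_name, plugin in plugins.items():
        hook_fn = getattr( plugin, 'hook_' + hook_name, None )
        if callable( hook_fn ):
            hook_arg = hook_fn( hook_arg, *args, **kwargs )
    return hook_arg

class _ShoutPlugin( object ):
    def hook_tag( self, s ):
        return s.upper()

class _BangPlugin( object ):
    def hook_tag( self, s ):
        return s + '!'

plugins = { 'shout': _ShoutPlugin(), 'bang': _BangPlugin() }
print( run_hook( plugins, 'tag', 'hello' ) )     # one return value per plugin name
print( filter_hook( plugins, 'tag', 'hello' ) )  # single chained result
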
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/datatypes/dataproviders/test_base_dataproviders.py
--- a/test/unit/datatypes/dataproviders/test_base_dataproviders.py
+++ b/test/unit/datatypes/dataproviders/test_base_dataproviders.py
@@ -7,15 +7,16 @@
#TODO: fix off by ones in FilteredDataProvider counters
+import imp
import unittest
import StringIO
import tempfilecache
-import utility
+utility = imp.load_source( 'utility', '../../util/utility.py' )
log = utility.set_up_filelogger( __name__ + '.log' )
+utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
-utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
from galaxy.datatypes.dataproviders import base, exceptions
from galaxy import eggs
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/datatypes/dataproviders/test_line_dataproviders.py
--- a/test/unit/datatypes/dataproviders/test_line_dataproviders.py
+++ b/test/unit/datatypes/dataproviders/test_line_dataproviders.py
@@ -7,17 +7,17 @@
#TODO: fix off by ones in FilteredDataProvider counters
+import imp
import unittest
import StringIO
import tempfilecache
-import utility
-
import test_base_dataproviders
+utility = imp.load_source( 'utility', '../../util/utility.py' )
log = utility.set_up_filelogger( __name__ + '.log' )
+utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
-utility.add_galaxy_lib_to_path( 'test/unit/datatypes/dataproviders' )
from galaxy import eggs
from galaxy.datatypes.dataproviders import line
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/datatypes/dataproviders/utility.py
--- a/test/unit/datatypes/dataproviders/utility.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-Unit test utilities.
-"""
-
-import os
-import sys
-import logging
-import textwrap
-
-def set_up_filelogger( logname, level=logging.DEBUG ):
- """
- Sets up logging to a file named `logname`
- (removing it first if it already exists).
-
- Usable with 'nosetests' to get logging msgs from failed tests
- (no logfile created).
- Usable with 'nosetests --nologcapture' to get logging msgs for all tests
- (in logfile).
- """
- if os.path.exists( logname ): os.unlink( logname )
- logging.basicConfig( filename=logname, level=logging.DEBUG )
- return logging
-
-def add_galaxy_lib_to_path( this_dir_relative_to_root ):
- """
- Adds `<galaxy>/lib` to `sys.path` given the scripts directory relative
- to `<galaxy>`.
- .. example::
- utility.add_galaxy_lib_to_path( '/test/unit/datatypes/dataproviders' )
- """
- glx_lib = os.path.join( os.getcwd().replace( this_dir_relative_to_root, '' ), 'lib' )
- sys.path.append( glx_lib )
-
-def clean_multiline_string( multiline_string, sep='\n' ):
- """
- Dedent, split, remove first and last empty lines, rejoin.
- """
- multiline_string = textwrap.dedent( multiline_string )
- string_list = multiline_string.split( sep )
- if not string_list[0]:
- string_list = string_list[1:]
- if not string_list[-1]:
- string_list = string_list[:-1]
- #return '\n'.join( docstrings )
- return ''.join([ ( s + '\n' ) for s in string_list ])
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/test_dataproviders.pyc
Binary file test/unit/test_dataproviders.pyc has changed
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/test_plugins.py
--- a/test/unit/test_plugins.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from os import makedirs
-from os.path import join, dirname, basename
-from tempfile import mkdtemp
-from shutil import rmtree
-
-from galaxy.visualization.registry import VisualizationsRegistry
-
-
-def test_visualization_loading():
- visualizations = __default_viz_root()
- registry = VisualizationsRegistry(visualizations, 'foo')
- assert "VisualizationsRegistry" in str(registry)
- assert visualizations in str(registry)
- assert registry.name == "visualizations"
- __assert_scatterplot_registered(registry)
- assert registry._get_template_paths() == [visualizations]
-
-
-def test_multiple_visualization_roots():
- temp_dir = mkdtemp()
- try:
- makedirs(join(temp_dir, "coolplugin5"))
- visualization_dirs = "%s,%s" % (__default_viz_root(), temp_dir)
- registry = VisualizationsRegistry(visualization_dirs, 'foo')
- __assert_scatterplot_registered(registry)
- assert "coolplugin5" in \
- [basename(path) for path in registry.get_plugin_directories()]
- assert registry._get_template_paths() == \
- [__default_viz_root(), temp_dir]
- finally:
- rmtree(temp_dir)
-
-
-def __assert_scatterplot_registered(registry):
- assert 'scatterplot' in \
- [basename(path) for path in registry.get_plugin_directories()]
-
-
-def __default_viz_root():
- galaxy_root = join(dirname(__file__), '..', '..')
- visualizations = join(galaxy_root, 'config', 'plugins', 'visualizations')
- return visualizations
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/util/utility.py
--- /dev/null
+++ b/test/unit/util/utility.py
@@ -0,0 +1,45 @@
+"""
+Unit test utilities.
+"""
+
+import os
+import sys
+import logging
+import textwrap
+
+def set_up_filelogger( logname, level=logging.DEBUG ):
+ """
+ Sets up logging to a file named `logname`
+ (removing it first if it already exists).
+
+ Usable with 'nosetests' to get logging msgs from failed tests
+ (no logfile created).
+ Usable with 'nosetests --nologcapture' to get logging msgs for all tests
+ (in logfile).
+ """
+ if os.path.exists( logname ): os.unlink( logname )
+ logging.basicConfig( filename=logname, level=logging.DEBUG )
+ return logging
+
+def add_galaxy_lib_to_path( this_dir_relative_to_root ):
+ """
+ Adds `<galaxy>/lib` to `sys.path` given the scripts directory relative
+ to `<galaxy>`.
+ .. example::
+ utility.add_galaxy_lib_to_path( '/test/unit/datatypes/dataproviders' )
+ """
+ glx_lib = os.path.join( os.getcwd().replace( this_dir_relative_to_root, '' ), 'lib' )
+ sys.path.append( glx_lib )
+
+def clean_multiline_string( multiline_string, sep='\n' ):
+ """
+ Dedent, split, remove first and last empty lines, rejoin.
+ """
+ multiline_string = textwrap.dedent( multiline_string )
+ string_list = multiline_string.split( sep )
+ if not string_list[0]:
+ string_list = string_list[1:]
+ if not string_list[-1]:
+ string_list = string_list[:-1]
+ #return '\n'.join( docstrings )
+ return ''.join([ ( s + '\n' ) for s in string_list ])
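
The relocated helpers above are loaded by the new unit tests with imp.load_source rather than a package import, and their docstrings describe the intended call pattern. A short usage sketch mirroring what the tests in this changeset do (the relative paths are illustrative and depend on the directory the test is run from):

import imp

# load the shared helpers directly from their new location
utility = imp.load_source( 'utility', '../../util/utility.py' )

# log to a per-module file, removing any stale copy first
log = utility.set_up_filelogger( __name__ + '.log' )

# put <galaxy>/lib on sys.path, given this test dir's path relative to the galaxy root
utility.add_galaxy_lib_to_path( 'test/unit/web/base' )

# normalize an indented triple-quoted literal before comparing it in an assertion
expected = utility.clean_multiline_string( """
    first line
    second line
""" )
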
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/visualizations/registry/test_plugins.py
--- /dev/null
+++ b/test/unit/visualizations/registry/test_plugins.py
@@ -0,0 +1,42 @@
+from os import makedirs
+from os.path import join, dirname, basename
+from tempfile import mkdtemp
+from shutil import rmtree
+
+from galaxy.visualization.registry import VisualizationsRegistry
+
+
+def test_visualization_loading():
+ visualizations = __default_viz_root()
+ registry = VisualizationsRegistry(visualizations, 'foo')
+ assert "VisualizationsRegistry" in str(registry)
+ assert visualizations in str(registry)
+ assert registry.name == "visualizations"
+ __assert_scatterplot_registered(registry)
+ assert registry._get_template_paths() == [visualizations]
+
+
+def test_multiple_visualization_roots():
+ temp_dir = mkdtemp()
+ try:
+ makedirs(join(temp_dir, "coolplugin5"))
+ visualization_dirs = "%s,%s" % (__default_viz_root(), temp_dir)
+ registry = VisualizationsRegistry(visualization_dirs, 'foo')
+ __assert_scatterplot_registered(registry)
+ assert "coolplugin5" in \
+ [basename(path) for path in registry.get_plugin_directories()]
+ assert registry._get_template_paths() == \
+ [__default_viz_root(), temp_dir]
+ finally:
+ rmtree(temp_dir)
+
+
+def __assert_scatterplot_registered(registry):
+ assert 'scatterplot' in \
+ [basename(path) for path in registry.get_plugin_directories()]
+
+
+def __default_viz_root():
+ galaxy_root = join(dirname(__file__), '..', '..', '..', '..')  # four levels up from test/unit/visualizations/registry
+ visualizations = join(galaxy_root, 'config', 'plugins', 'visualizations')
+ return visualizations
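As a usage sketch of the registry exercised above (illustrative only; the scratch root, the 'myplugin' directory, and the 'foo' label are placeholders mirroring the tests):

import os
import shutil
import tempfile
from os.path import basename, dirname, join

from galaxy.visualization.registry import VisualizationsRegistry

# the shipped visualizations root, computed as in __default_viz_root above
galaxy_root = join( dirname( __file__ ), '..', '..', '..', '..' )
default_root = join( galaxy_root, 'config', 'plugins', 'visualizations' )

# a second, scratch plugin root holding one placeholder plugin
extra_root = tempfile.mkdtemp()
try:
    os.makedirs( join( extra_root, 'myplugin' ) )
    # the registry accepts a comma-separated list of plugin roots
    registry = VisualizationsRegistry( '%s,%s' % ( default_root, extra_root ), 'foo' )
    print [ basename( path ) for path in registry.get_plugin_directories() ]
finally:
    shutil.rmtree( extra_root )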
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/web/base/mock.py
--- /dev/null
+++ b/test/unit/web/base/mock.py
@@ -0,0 +1,61 @@
+"""
+"""
+import tempfile
+import os
+import shutil
+
+class MockDir( object ):
+
+ def __init__( self, structure_dict, where=None ):
+ self.structure_dict = structure_dict
+ self.create_root( structure_dict, where )
+
+ def create_root( self, structure_dict, where=None ):
+ self.root_path = tempfile.mkdtemp( dir=where )
+ #print 'created root:', self.root_path
+ self.create_structure( self.root_path, structure_dict )
+
+ def create_structure( self, current_path, structure_dict ):
+ for k, v in structure_dict.items():
+ # if value is string, create a file in the current path and write v as file contents
+ if isinstance( v, str ):
+ self.create_file( os.path.join( current_path, k ), v )
+ # if it's a dict, create a dir here named k and recurse into it
+ if isinstance( v, dict ):
+ subdir_path = os.path.join( current_path, k )
+ #print 'subdir:', subdir_path
+ os.mkdir( subdir_path )
+ self.create_structure( subdir_path, v )
+
+ def create_file( self, path, contents ):
+ #print 'file:', path
+ with open( path, 'w' ) as newfile:
+ newfile.write( contents )
+
+ def remove( self ):
+ #print 'removing:', self.root_path
+ shutil.rmtree( self.root_path )
+
+
+class MockAppConfig( object ):
+ def __init__( self, root ):
+ self.root = root
+
+class MockApp( object ):
+ def __init__( self, root ):
+ self.config = MockAppConfig( root )
+
+class MockTrans( object ):
+ def fill_template( self, filename, template_lookup=None, **kwargs ):
+ template = template_lookup.get_template( filename )
+ template.output_encoding = 'utf-8'
+ return template.render( **kwargs )
+
+if __name__ == '__main__':
+ td = MockDir({
+ 'file1' : 'Hello\nthere,\t...you',
+ 'dir1' : {
+ 'file2' : 'Blerbler',
+ }
+ })
+ td.remove()
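A small sketch of how MockTrans pairs with a mako TemplateLookup (illustrative only; the temp directory, the 'hello.mako' template, and its contents are made up):

import os
import shutil
import tempfile

from mako.lookup import TemplateLookup

from mock import MockTrans  # the module above; assumes it is importable as 'mock'

tmpl_dir = tempfile.mkdtemp()
try:
    with open( os.path.join( tmpl_dir, 'hello.mako' ), 'w' ) as template_file:
        template_file.write( 'Hello, ${name}!' )
    lookup = TemplateLookup( directories=[ tmpl_dir ] )
    print MockTrans().fill_template( 'hello.mako', template_lookup=lookup, name='Galaxy' )
    # prints: Hello, Galaxy!
finally:
    shutil.rmtree( tmpl_dir )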
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/web/base/test_HookPluginManager.py
--- /dev/null
+++ b/test/unit/web/base/test_HookPluginManager.py
@@ -0,0 +1,254 @@
+"""
+"""
+import os
+import imp
+import unittest
+import types
+
+utility = imp.load_source( 'utility', '../../util/utility.py' )
+log = utility.set_up_filelogger( __name__ + '.log' )
+utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
+
+from galaxy.web.base.pluginframework import HookPluginManager
+
+import mock
+
+# ----------------------------------------------------------------------------- globals
+loading_point = HookPluginManager.loading_point_filename
+
+contents1 = """
+import os
+
+def bler( x, y=3 ):
+ return ( x, y )
+"""
+
+contents2 = """
+raise Exception( 'Bler' )
+"""
+
+contents3 = """
+import contents1
+
+def blah( w ):
+ return tuple( [ w ] + list( contents1.bler( 2 ) ) )
+"""
+
+contents4 = """
+from galaxy import util
+
+def blah( s ):
+ return util.listify( s )
+"""
+
+contents5 = """
+def hook_blah( s ):
+ return s.title()
+
+def hook_filter_test( s ):
+ s += ' one'
+ return s
+"""
+
+contents6 = """
+def hook_blah( s ):
+ return s.upper()
+
+def hook_filter_test( s ):
+ s += ' two'
+ return s
+"""
+
+contents7 = """
+def hook_blah( s ):
+ raise Exception( 'bler' )
+
+def hook_filter_test( s ):
+ raise Exception( 'bler' )
+"""
+
+# -----------------------------------------------------------------------------
+class HookPluginManager_TestCase( unittest.TestCase ):
+
+ def test_loading_point( self ):
+ """should attempt load on dirs containing loading_point file"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ loading_point : contents1
+ },
+ 'not_a_plugin' : 'blerbler'
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
+ self.assertFalse( plugin_mgr.plugins.get( 'not_a_plugin', False ) )
+
+ plugin = plugin_mgr.plugins[ 'plugin1' ]
+ self.assertEqual( plugin.name, 'plugin1' )
+ self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertIsInstance( plugin.module, types.ModuleType )
+ self.assertEqual( plugin.module.bler( 2 ), ( 2, 3 ) )
+
+ mock_app_dir.remove()
+
+ def test_bad_loading_points( self ):
+ """should NOT attempt load on dirs NOT containing loading_point file"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {},
+ 'plugin2' : {
+ 'plogin.py' : 'wot'
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertEqual( plugin_mgr.plugins.keys(), [] )
+
+ mock_app_dir.remove()
+
+ def test_bad_import( self ):
+ """should error gracefully (skip) on bad import"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ loading_point : contents2
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertEqual( plugin_mgr.plugins.keys(), [] )
+
+ mock_app_dir.remove()
+
+ def test_import_w_rel_import( self ):
+ """should allow loading_point to rel. import other modules"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ 'contents1.py': contents1,
+ loading_point : contents3
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
+
+ plugin = plugin_mgr.plugins[ 'plugin1' ]
+ self.assertEqual( plugin.name, 'plugin1' )
+ self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertIsInstance( plugin.module, types.ModuleType )
+ self.assertEqual( plugin.module.blah( 1 ), ( 1, 2, 3 ) )
+
+ mock_app_dir.remove()
+
+ def test_import_w_galaxy_import( self ):
+ """should allow loading_point to rel. import GALAXY modules"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ loading_point : contents4
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
+
+ plugin = plugin_mgr.plugins[ 'plugin1' ]
+ self.assertEqual( plugin.name, 'plugin1' )
+ self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertIsInstance( plugin.module, types.ModuleType )
+
+ self.assertEqual( plugin.module.blah( 'one,two' ), [ 'one', 'two' ] )
+
+ mock_app_dir.remove()
+
+ def test_run_hooks( self ):
+ """should run hooks of loaded plugins"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ loading_point : contents5
+ },
+ 'plugin2' : {
+ loading_point : contents6
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] )
+
+ return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' )
+ self.assertEqual( return_val_dict, { 'plugin1' : 'One Two Check', 'plugin2' : 'ONE TWO CHECK' } )
+
+ result = plugin_mgr.filter_hook( 'filter_test', 'check' )
+ self.assertEqual( result, 'check one two' )
+
+ mock_app_dir.remove()
+
+ def test_hook_errs( self ):
+ """should fail gracefully if hook fails (and continue with other plugins)"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ loading_point : contents5
+ },
+ 'plugin2' : {
+ loading_point : contents6
+ },
+ 'plugin3' : {
+ loading_point : contents7
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3' ] )
+
+ return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' )
+ self.assertEqual( return_val_dict, { 'plugin1' : 'One Two Check', 'plugin2' : 'ONE TWO CHECK' } )
+
+ result = plugin_mgr.filter_hook( 'filter_test', 'check' )
+ self.assertEqual( result, 'check one two' )
+
+ mock_app_dir.remove()
+
+
+
+if __name__ == '__main__':
+ unittest.main()
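A sketch of the hook naming convention these tests exercise (illustrative only; the 'announcer' plugin, its hook bodies, and the hook names are made up; it assumes the same working directory and sibling mock.py as the tests above):

import mock

from galaxy.web.base.pluginframework import HookPluginManager

loading_point = HookPluginManager.loading_point_filename

example_hooks = """
def hook_announce( msg ):
    return 'plugin saw: ' + msg

def hook_filter_announce( msg ):
    return msg + '!'
"""

mock_app_dir = mock.MockDir({
    'plugins' : {
        'announcer' : { loading_point : example_hooks }
    }
})
plugin_mgr = HookPluginManager( mock.MockApp( mock_app_dir.root_path ),
                                directories_setting='plugins' )

# run_hook( 'announce', ... ) calls each plugin's hook_announce and maps
# plugin name to return value
print plugin_mgr.run_hook( 'announce', 'hello' )
# prints: {'announcer': 'plugin saw: hello'}

# filter_hook( 'filter_announce', ... ) threads the value through each
# plugin's hook_filter_announce in turn
print plugin_mgr.filter_hook( 'filter_announce', 'hello' )
# prints: hello!

mock_app_dir.remove()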
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/web/base/test_PageServingPluginManager.py
--- /dev/null
+++ b/test/unit/web/base/test_PageServingPluginManager.py
@@ -0,0 +1,129 @@
+"""
+"""
+import os
+import imp
+import unittest
+
+utility = imp.load_source( 'utility', '../../util/utility.py' )
+log = utility.set_up_filelogger( __name__ + '.log' )
+utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
+
+from galaxy.web.base.pluginframework import PageServingPluginManager
+
+import mock
+
+# ----------------------------------------------------------------------------- globals
+contents1 = """${what} ${you} ${say}"""
+
+# -----------------------------------------------------------------------------
+class PageServingPluginManager_TestCase( unittest.TestCase ):
+
+ def test_plugin_load( self ):
+ """should attempt load if criteria met"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ 'templates' : {},
+ 'static' : {}
+ },
+ 'plugin2' : {
+ 'static' : {}
+ },
+ 'plugin3' : {
+ 'templates' : {}
+ },
+ 'not_a_plugin1' : 'blerbler',
+ 'not_a_plugin2' : {},
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertEqual( plugin_mgr.base_url, 'test' )
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3' ] )
+ self.assertFalse( plugin_mgr.plugins.get( 'not_a_plugin', False ) )
+
+ plugin1 = plugin_mgr.plugins[ 'plugin1' ]
+ self.assertEqual( plugin1.name, 'plugin1' )
+ self.assertEqual( plugin1.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertEqual( plugin1.base_url, '/'.join([ plugin_mgr.base_url, plugin1.name ]) )
+ self.assertTrue( plugin1.serves_static )
+ self.assertEqual( plugin1.static_path, os.path.join( plugin1.path, 'static' ) )
+ self.assertEqual( plugin1.static_url, '/'.join([ plugin1.base_url, 'static' ]) )
+ self.assertTrue( plugin1.serves_templates )
+ self.assertEqual( plugin1.template_path, os.path.join( plugin1.path, 'templates' ) )
+ self.assertEqual( plugin1.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+ plugin2 = plugin_mgr.plugins[ 'plugin2' ]
+ self.assertEqual( plugin2.name, 'plugin2' )
+ self.assertEqual( plugin2.path, os.path.join( expected_plugins_path, 'plugin2' ) )
+ self.assertEqual( plugin2.base_url, '/'.join([ plugin_mgr.base_url, plugin2.name ]) )
+ self.assertTrue( plugin2.serves_static )
+ self.assertEqual( plugin2.static_path, os.path.join( plugin2.path, 'static' ) )
+ self.assertEqual( plugin2.static_url, '/'.join([ plugin2.base_url, 'static' ]) )
+ self.assertFalse( plugin2.serves_templates )
+
+ plugin3 = plugin_mgr.plugins[ 'plugin3' ]
+ self.assertEqual( plugin3.name, 'plugin3' )
+ self.assertEqual( plugin3.path, os.path.join( expected_plugins_path, 'plugin3' ) )
+ self.assertEqual( plugin3.base_url, '/'.join([ plugin_mgr.base_url, plugin3.name ]) )
+ self.assertFalse( plugin3.serves_static )
+ self.assertTrue( plugin3.serves_templates )
+ self.assertEqual( plugin1.template_path, os.path.join( plugin1.path, 'templates' ) )
+ self.assertEqual( plugin1.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+ mock_app_dir.remove()
+
+ def test_plugin_static_map( self ):
+ """"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ 'templates' : {},
+ 'static' : {}
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
+ plugin = plugin_mgr.plugins[ 'plugin1' ]
+ self.assertEqual( plugin_mgr.get_static_urls_and_paths(), [( plugin.static_url, plugin.static_path )] )
+
+ mock_app_dir.remove()
+
+ def test_plugin_templates( self ):
+ """"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {
+ 'templates' : {
+ 'test.mako' : contents1
+ },
+ }
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1' ] )
+
+ plugin = plugin_mgr.plugins[ 'plugin1' ]
+ rendered = plugin_mgr.fill_template( mock.MockTrans(), plugin, 'test.mako',
+ what='Hey', you='Ho', say='HeyHey HoHo' )
+ self.assertEqual( rendered, 'Hey Ho HeyHey HoHo' )
+
+ mock_app_dir.remove()
+
+
+if __name__ == '__main__':
+ unittest.main()
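A sketch of the directory layout a page-serving plugin needs and how its templates are filled (illustrative only; the 'greeter' plugin, 'greeting.mako', and its contents are made up; assumes the sibling mock.py as above):

import mock

from galaxy.web.base.pluginframework import PageServingPluginManager

mock_app_dir = mock.MockDir({
    'plugins' : {
        'greeter' : {
            'templates' : { 'greeting.mako' : '${greeting}, ${name}!' },
            'static'    : {}
        }
    }
})
plugin_mgr = PageServingPluginManager( mock.MockApp( mock_app_dir.root_path ),
                                       'test', directories_setting='plugins' )

plugin = plugin_mgr.plugins[ 'greeter' ]
print plugin.static_url
# prints: test/greeter/static
print plugin_mgr.fill_template( mock.MockTrans(), plugin, 'greeting.mako',
                                greeting='Hello', name='Galaxy' )
# prints: Hello, Galaxy!

mock_app_dir.remove()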
diff -r 40fb094fe82b6518a8138311a6427c539c6be8f8 -r 6eb358cc82c15782abc2c4e912f935830dfc2ece test/unit/web/base/test_PluginManager.py
--- /dev/null
+++ b/test/unit/web/base/test_PluginManager.py
@@ -0,0 +1,103 @@
+"""
+Unit tests for ``galaxy.web.base.pluginframework.PluginManager``
+"""
+import os
+import imp
+import unittest
+
+utility = imp.load_source( 'utility', '../../util/utility.py' )
+log = utility.set_up_filelogger( __name__ + '.log' )
+utility.add_galaxy_lib_to_path( 'test/unit/web/base' )
+
+from galaxy.web.base.pluginframework import PluginManager
+
+import mock
+
+class PluginManager_TestCase( unittest.TestCase ):
+
+ def test_rel_path_search( self ):
+ """should be able to search given rel. path"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {},
+ 'plugin2' : {},
+ 'file1' : 'blerbler'
+ }
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = PluginManager( mock_app, directories_setting='plugins' )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
+ self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
+
+ mock_app_dir.remove()
+
+ def test_abs_path_search( self ):
+ """should be able to search given abs. path"""
+ mock_app_dir = mock.MockDir({})
+ mock_plugin_dir = mock.MockDir({
+ 'plugin1' : {},
+ 'plugin2' : {},
+ 'file1' : 'blerbler'
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ plugin_mgr = PluginManager( mock_app, directories_setting=mock_plugin_dir.root_path )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_path = mock_plugin_dir.root_path
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2' ] )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
+ self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
+
+ def test_multiple_dirs( self ):
+ """should search in multiple directories"""
+ mock_app_dir = mock.MockDir({
+ 'plugins' : {
+ 'plugin1' : {},
+ 'plugin2' : {},
+ 'file1' : 'blerbler'
+ }
+ })
+ mock_abs_plugin_dir = mock.MockDir({
+ 'plugin3' : {},
+ 'plugin4' : {},
+ 'file2' : 'blerbler'
+ })
+ mock_app = mock.MockApp( mock_app_dir.root_path )
+ directories_setting = ','.join([ 'plugins', mock_abs_plugin_dir.root_path ])
+ plugin_mgr = PluginManager( mock_app, directories_setting=directories_setting )
+
+ app_path = mock_app_dir.root_path
+ expected_plugins_rel_path = os.path.join( app_path, 'plugins' )
+ expected_plugins_abs_path = mock_abs_plugin_dir.root_path
+
+ self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_rel_path, expected_plugins_abs_path ] )
+ self.assertItemsEqual( plugin_mgr.plugins.keys(), [ 'plugin1', 'plugin2', 'plugin3', 'plugin4' ] )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_rel_path, 'plugin1' ) )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_rel_path, 'plugin2' ) )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin3' ].name, 'plugin3' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin3' ].path, os.path.join( expected_plugins_abs_path, 'plugin3' ) )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].name, 'plugin4' )
+ self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].path, os.path.join( expected_plugins_abs_path, 'plugin4' ) )
+ self.assertFalse( plugin_mgr.plugins.get( 'file1', False ) )
+ self.assertFalse( plugin_mgr.plugins.get( 'file2', False ) )
+
+
+if __name__ == '__main__':
+ unittest.main()
+
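A sketch of the comma-separated directories_setting these tests cover, mixing one app-relative and one absolute plugin root (illustrative only; the plugin names are made up; assumes the sibling mock.py as above):

import mock

from galaxy.web.base.pluginframework import PluginManager

shared_plugin_dir = mock.MockDir({ 'shared_plugin' : {} })
mock_app_dir = mock.MockDir({ 'plugins' : { 'local_plugin' : {} } })

# 'plugins' is resolved relative to the app root; the second entry is absolute
setting = ','.join([ 'plugins', shared_plugin_dir.root_path ])
plugin_mgr = PluginManager( mock.MockApp( mock_app_dir.root_path ),
                            directories_setting=setting )

print sorted( plugin_mgr.plugins.keys() )
# prints: ['local_plugin', 'shared_plugin']

mock_app_dir.remove()
shared_plugin_dir.remove()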
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Plugin framework: fix relative path parsing in PluginManager
by commits-noreply@bitbucket.org 18 Sep '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/40fb094fe82b/
Changeset: 40fb094fe82b
User: carlfeberhard
Date: 2013-09-18 20:49:58
Summary: Plugin framework: fix relative path parsing in PluginManager
Affected #: 1 file
diff -r eb69169a04efb1fba610b943fd38a8ceb991d315 -r 40fb094fe82b6518a8138311a6427c539c6be8f8 lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -93,7 +93,7 @@
         for directory in util.listify( directories_setting ):
             directory = directory.strip()
-            if directory.startswith( '/' ):
+            if not directory.startswith( '/' ):
                 directory = os.path.join( galaxy_root, directory )
             if not os.path.exists( directory ):
                 log.warn( '%s, directory not found: %s', self, directory )
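A sketch of the corrected resolution rule (illustrative only; the '/srv/galaxy' root and the example directories are made up): relative entries are joined onto the Galaxy root, absolute entries are used as given.

import os

galaxy_root = '/srv/galaxy'  # assumed root; in Galaxy this comes from the app
for directory in [ 'config/plugins/visualizations', '/opt/galaxy-plugins' ]:
    directory = directory.strip()
    # after the fix, only relative settings are joined to galaxy_root
    if not directory.startswith( '/' ):
        directory = os.path.join( galaxy_root, directory )
    print directory
# prints:
#   /srv/galaxy/config/plugins/visualizations
#   /opt/galaxy-plugins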
Repository URL: https://bitbucket.org/galaxy/galaxy-central/