galaxy-commits
March 2013
- 1 participant
- 183 discussions
commit/galaxy-central: greg: Remove ~/tool_shed/util/shed_util.py
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1cc6973069b5/
changeset: 1cc6973069b5
user: greg
date: 2013-03-15 21:09:08
summary: Remove ~/tool_shed/util/shed_util.py
affected #: 1 file
diff -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 -r 1cc6973069b5e7afeb5c2d401c262b0d39efd24b lib/tool_shed/util/shed_util.py
--- a/lib/tool_shed/util/shed_util.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os, tempfile, shutil, logging, urllib2, threading
-from galaxy.datatypes import checkers
-from galaxy.web import url_for
-from galaxy import util
-from galaxy.util import json
-from galaxy.webapps.tool_shed.util import container_util
-from tool_shed.galaxy_install.tool_dependencies.install_util import install_package, set_environment
-from galaxy.model.orm import and_
-import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, repository_dependency_util, tool_dependency_util
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'mercurial' )
-from mercurial import hg, ui, commands
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
-from elementtree.ElementTree import Element, SubElement
-
-log = logging.getLogger( __name__ )
-
-
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
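Since this commit only deletes the now-unused module, the safety of the removal rests on the preceding refactor (the next thread below) having already updated every caller. A minimal, hypothetical sketch of how one might confirm that no Python file under lib/ still references the old module; the helper name and the search root are assumptions for illustration, not part of galaxy-central:
# --- illustrative sketch, not part of the commit ---
import os
import re

def find_shed_util_references(root="lib"):
    """Yield (path, line_number, line) for lines that still reference the removed
    tool_shed.util.shed_util module. shed_util_common is not reported, because the
    word boundary after 'shed_util' does not match the trailing underscore."""
    pattern = re.compile(r"\bshed_util\b")
    for dirpath, _dirnames, filenames in os.walk(root):
        for filename in filenames:
            if not filename.endswith(".py"):
                continue
            path = os.path.join(dirpath, filename)
            with open(path) as handle:
                for line_number, line in enumerate(handle, 1):
                    if pattern.search(line):
                        yield path, line_number, line.rstrip()

if __name__ == "__main__":
    for path, line_number, line in find_shed_util_references():
        print("%s:%d: %s" % (path, line_number, line))
# --- end sketch ---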
commit/galaxy-central: greg: Refactor shed_util and shed_util_common into appropriate tool shed Galaxy utilities components.
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/320c9b7f08c4/
changeset: 320c9b7f08c4
user: greg
date: 2013-03-15 21:01:09
summary: Refactor shed_util and shed_util_common into appropriate tool shed Galaxy utilities components.
affected #: 24 files
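For orientation before the long diff that follows: the pattern repeated across the 24 files is that call sites stop using the monolithic shed_util module and instead call the per-concern utility modules introduced by this changeset. A condensed, runnable sketch of that migration, expressed as a plain mapping; the entries are a representative subset taken from the diff below, not an exhaustive list:
# --- illustrative sketch, not part of the commit ---
# Old shed_util call -> its new home after this changeset
# (suc is the existing alias for tool_shed.util.shed_util_common).
SHED_UTIL_MIGRATION = {
    "shed_util.activate_repository": "common_install_util.activate_repository",
    "shed_util.handle_tool_dependencies": "common_install_util.handle_tool_dependencies",
    "shed_util.get_dependencies_for_repository": "common_install_util.get_dependencies_for_repository",
    "shed_util.install_data_managers": "data_manager_util.install_data_managers",
    "shed_util.remove_from_data_manager": "data_manager_util.remove_from_data_manager",
    "shed_util.load_installed_datatypes": "datatype_util.load_installed_datatypes",
    "shed_util.get_tool_dependency": "tool_dependency_util.get_tool_dependency",
    "shed_util.remove_tool_dependency": "tool_dependency_util.remove_tool_dependency",
    "shed_util.create_tool_dependency_objects": "tool_dependency_util.create_tool_dependency_objects",
    "shed_util.remove_from_tool_panel": "tool_util.remove_from_tool_panel",
    "shed_util.handle_tool_versions": "tool_util.handle_tool_versions",
    "shed_util.get_update_to_changeset_revision_and_ctx_rev": "repository_util.get_update_to_changeset_revision_and_ctx_rev",
    "shed_util.pull_repository": "repository_util.pull_repository",
    "shed_util.create_repository_dependency_objects": "repository_dependency_util.create_repository_dependency_objects",
    "shed_util.populate_containers_dict_from_repository_metadata": "metadata_util.populate_containers_dict_from_repository_metadata",
    "shed_util.update_tool_shed_repository_status": "suc.update_tool_shed_repository_status",
}

if __name__ == "__main__":
    # Print the mapping as a quick reference while reading the diff.
    for old_call, new_call in sorted(SHED_UTIL_MIGRATION.items()):
        print("%-60s -> %s" % (old_call, new_call))
# --- end sketch ---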
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -5,10 +5,10 @@
from galaxy.web.framework.helpers import iff, grids
from galaxy.util import json
from galaxy.model.orm import or_
-import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
-from tool_shed.util import encoding_util
+from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
+from tool_shed.util import repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.galaxy_install import repository_util
from galaxy.webapps.tool_shed.util import workflow_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
import pkg_resources
@@ -36,7 +36,7 @@
repository_id = kwd[ 'id' ]
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
try:
- shed_util.activate_repository( trans, repository )
+ common_install_util.activate_repository( trans, repository )
except Exception, e:
error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error_message )
@@ -92,7 +92,7 @@
# can reset the metadata if necessary. This will ensure that information about repository dependencies and tool dependencies
# will be current. Only allow selecting a different section in the tool panel if the repository was uninstalled and it contained
# tools that should be displayed in the tool panel.
- changeset_revision_dict = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
+ changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
if current_changeset_revision and current_ctx_rev:
@@ -142,7 +142,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency = shed_util.get_tool_dependency( trans, kwd[ 'id' ] )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, kwd[ 'id' ] )
if tool_dependency.in_error_state:
message = "This tool dependency is not installed correctly (see the <b>Tool dependency installation error</b> below). "
message += "Choose <b>Uninstall this tool dependency</b> from the <b>Repository Actions</b> menu, correct problems "
@@ -217,16 +217,16 @@
if params.get( 'deactivate_or_uninstall_repository_button', False ):
if tool_shed_repository.includes_tools_for_display_in_tool_panel:
# Handle tool panel alterations.
- shed_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
+ tool_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked )
if tool_shed_repository.includes_data_managers:
- shed_util.remove_from_data_manager( trans.app, tool_shed_repository )
+ data_manager_util.remove_from_data_manager( trans.app, tool_shed_repository )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
- installed_repository_dict = shed_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
+ installed_repository_dict = datatype_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True )
if installed_repository_dict and 'converter_path' in installed_repository_dict:
- shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ datatype_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
if installed_repository_dict and 'display_path' in installed_repository_dict:
- shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
+ datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
@@ -244,7 +244,7 @@
tool_shed_repository.uninstalled = True
# Remove all installed tool dependencies, but don't touch any repository dependencies..
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
- uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = '%s %s' % ( errors, error_message )
tool_shed_repository.deleted = True
@@ -371,7 +371,7 @@
return repo_information_dict
def get_versions_of_tool( self, app, guid ):
- tool_version = shed_util.get_tool_version( app, guid )
+ tool_version = tool_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
def handle_repository_contents( self, trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None,
@@ -395,26 +395,26 @@
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'tools' in metadata_dict:
- tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
+ tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
- shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
+ tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = shed_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
+ repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( trans.app,
+ repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( trans.app,
tool_path,
sample_files,
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- shed_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
- shed_util.add_to_tool_panel( app=trans.app,
+ tool_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
+ tool_util.add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.installed_changeset_revision,
@@ -424,8 +424,13 @@
tool_panel_dict=tool_panel_dict,
new_install=True )
if 'data_manager' in metadata_dict:
- new_data_managers = shed_util.install_data_managers( trans.app, trans.app.config.shed_data_manager_config_file, metadata_dict, shed_config_dict, relative_install_dir,
- tool_shed_repository, repository_tools_tups )
+ new_data_managers = data_manager_util.install_data_managers( trans.app,
+ trans.app.config.shed_data_manager_config_file,
+ metadata_dict,
+ shed_config_dict,
+ relative_install_dir,
+ tool_shed_repository,
+ repository_tools_tups )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -437,16 +442,16 @@
files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
- converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
+ converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
@@ -533,10 +538,10 @@
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
- installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
message += ' %s' % suc.to_safe_string( installed_tool_dependency.error_message )
@@ -564,7 +569,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
if kwd.get( 'install_tool_dependencies_button', False ):
# Filter tool dependencies to only those that are installed.
@@ -616,10 +621,10 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- relative_clone_dir = shed_util.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
clone_dir = os.path.join( tool_path, relative_clone_dir )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( tool_path, relative_install_dir )
@@ -627,12 +632,12 @@
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which is can be updated.
- changeset_revision_dict = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
+ changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
if current_ctx_rev != ctx_rev:
repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
- shed_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
+ repository_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
suc.update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
@@ -647,9 +652,9 @@
metadata = tool_shed_repository.metadata
if 'tools' in metadata:
# Get the tool_versions from the tool shed for each tool in the installed change set.
- shed_util.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
url = suc.url_join( tool_shed_url,
'/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -659,7 +664,7 @@
response.close()
if text:
tool_version_dicts = json.from_json_string( text )
- shed_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
+ tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
@@ -668,20 +673,20 @@
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp()
# Install tool dependencies.
- shed_util.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
- installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_shed_repository.tool_dependencies )
try:
shutil.rmtree( work_dir )
except:
pass
- shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
else:
# An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
self.set_repository_attributes( trans,
@@ -739,12 +744,12 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- repository=repository,
- reinstalling=False,
- required_repo_info_dicts=None )
+ containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository=repository,
+ reinstalling=False,
+ required_repo_info_dicts=None )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -825,9 +830,9 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
# We need a tool_shed_repository, so get it from one of the tool_dependencies.
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
tool_shed_repository = tool_dependency.tool_shed_repository
self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name
self.tool_dependency_grid.global_actions = \
@@ -868,7 +873,7 @@
elif operation == 'uninstall':
tool_dependencies_for_uninstallation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
trans.model.ToolDependency.installation_status.ERROR ]:
tool_dependencies_for_uninstallation.append( tool_dependency )
@@ -883,7 +888,7 @@
if trans.app.config.tool_dependency_dir:
tool_dependencies_for_installation = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
trans.model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
@@ -978,15 +983,15 @@
install_tool_dependencies = False
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
- reinstalling=False,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=False,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
+ repository_dependency_util.create_repository_dependency_objects( trans,
+ tool_path,
+ tool_shed_url,
+ repo_info_dicts,
+ reinstalling=False,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=False,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
if message and len( repo_info_dicts ) == 1:
installed_tool_shed_repository = created_or_updated_tool_shed_repositories[ 0 ]
message+= 'Click <a href="%s">here</a> to manage the repository. ' % \
@@ -1069,39 +1074,39 @@
repo_info_dict = repo_info_dicts[ 0 ]
name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
- shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
- readme_files_dict = shed_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
+ common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ readme_files_dict = suc.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
# We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
# dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
# each displayed repository dependency will display a status, whether installed or missing.
- containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies )
else:
# We're installing a list of repositories, each of which may have tool dependencies or repository dependencies.
containers_dicts = []
for repo_info_dict in repo_info_dicts:
name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
- shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
- containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=None,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=None,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies )
containers_dicts.append( containers_dict )
# Merge all containers into a single container.
- containers_dict = shed_util.merge_containers_dicts_for_new_install( containers_dicts )
+ containers_dict = repository_util.merge_containers_dicts_for_new_install( containers_dicts )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
if includes_tool_dependencies:
@@ -1165,8 +1170,8 @@
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
- clone_dir = os.path.join( tool_path, shed_util.generate_tool_shed_repository_install_dir( repository_clone_url,
- tool_shed_repository.installed_changeset_revision ) )
+ clone_dir = os.path.join( tool_path, suc.generate_tool_shed_repository_install_dir( repository_clone_url,
+ tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
tool_section = None
@@ -1179,12 +1184,11 @@
includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools_for_display_in_tool_panel:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
- tool_section, new_tool_panel_section, tool_panel_section_key = \
- shed_util.handle_tool_panel_selection( trans=trans,
- metadata=metadata,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
+ tool_section, new_tool_panel_section, tool_panel_section_key = tool_util.handle_tool_panel_selection( trans=trans,
+ metadata=metadata,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
# The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function.
tool_shed_repository = suc.create_or_update_tool_shed_repository( trans.app,
tool_shed_repository.name,
@@ -1217,28 +1221,28 @@
tool_dependencies = metadata.get( 'tool_dependencies', None )
else:
tool_dependencies = None
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans=trans,
- tool_path=tool_path,
- tool_shed_url=tool_shed_url,
- repo_info_dicts=repo_info_dicts,
- reinstalling=True,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
+ repository_dependency_util.create_repository_dependency_objects( trans=trans,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ reinstalling=True,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
# Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
@@ -1365,19 +1369,19 @@
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.changeset_revision )
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=tool_shed_repository.ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=tool_shed_repository.ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
repository_name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
- shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
if installed_repository_dependencies or missing_repository_dependencies:
has_repository_dependencies = True
else:
@@ -1387,7 +1391,7 @@
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
- if shed_util.panel_entry_per_tool( tool_panel_dict ):
+ if tool_util.panel_entry_per_tool( tool_panel_dict ):
# The following forces everything to be loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
tool_section_dict = tool_section_dicts[ 0 ]
@@ -1415,17 +1419,17 @@
original_section_name = ''
tool_panel_section_select_field = None
shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
- containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
- containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+ containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
# Handle repository dependencies check box.
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
@@ -1566,7 +1570,7 @@
response.close()
if text:
tool_version_dicts = json.from_json_string( text )
- shed_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
+ tool_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
else:
@@ -1576,12 +1580,12 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- repository=repository,
- reinstalling=False,
- required_repo_info_dicts=None )
+ containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository=repository,
+ reinstalling=False,
+ required_repo_info_dicts=None )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
@@ -1622,7 +1626,7 @@
tool_dependency_ids = util.listify( params.get( 'id', None ) )
tool_dependencies = []
for tool_dependency_id in tool_dependency_ids:
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id )
+ tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
tool_dependencies.append( tool_dependency )
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
if kwd.get( 'uninstall_tool_dependencies_button', False ):
@@ -1633,7 +1637,7 @@
if tool_dependency.can_uninstall:
tool_dependencies_for_uninstallation.append( tool_dependency )
for tool_dependency in tool_dependencies_for_uninstallation:
- uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
errors = True
message = '%s %s' % ( message, error_message )
@@ -1681,12 +1685,12 @@
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
+ repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
tool_shed = suc.clean_tool_shed_url( tool_shed_url )
# Remove old Data Manager entries
if repository.includes_data_managers:
- shed_util.remove_from_data_manager( trans.app, repository )
+ data_manager_util.remove_from_data_manager( trans.app, repository )
# Update the repository metadata.
metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1710,7 +1714,7 @@
if tool_panel_dict is None:
tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
- shed_util.add_to_tool_panel( app=trans.app,
+ tool_util.add_to_tool_panel( app=trans.app,
repository_name=repository.name,
repository_clone_url=repository_clone_url,
changeset_revision=repository.installed_changeset_revision,
@@ -1721,12 +1725,16 @@
new_install=False )
# Add new Data Manager entries
if 'data_manager' in metadata_dict:
- new_data_managers = shed_util.install_data_managers( trans.app, trans.app.config.shed_data_manager_config_file, metadata_dict,
- repository.get_shed_config_dict( trans.app ), os.path.join( relative_install_dir, name ),
- repository, repository_tools_tups )
+ new_data_managers = data_manager_util.install_data_managers( trans.app,
+ trans.app.config.shed_data_manager_config_file,
+ metadata_dict,
+ repository.get_shed_config_dict( trans.app ),
+ os.path.join( relative_install_dir, name ),
+ repository,
+ repository_tools_tups )
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
@@ -1829,10 +1837,6 @@
def can_select_tool_panel_section():
pass
-def get_tool_dependency( trans, id ):
- """Get a tool_dependency from the database via id"""
- return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
-
def have_shed_tool_conf_for_install( trans ):
if not trans.app.toolbox.shed_tool_confs:
return False
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py
@@ -1,6 +1,7 @@
import logging
from galaxy.web.framework.helpers import time_ago
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import metadata_util
from galaxy import web
from galaxy.web.base.controller import BaseAPIController
@@ -29,7 +30,7 @@
rval = []
repository_metadata_id = kwd.get( 'repository_metadata_id', None )
try:
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
repository_dict[ 'url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=repository_metadata_id )
rval.append( repository_dict )
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -1,6 +1,7 @@
import datetime
from galaxy.web.framework.helpers import time_ago
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import metadata_util
from galaxy import web, util
from galaxy.model.orm import and_, or_
from galaxy.web.base.controller import BaseAPIController
@@ -73,7 +74,7 @@
Displays information about a repository_metadata record in the Tool Shed.
"""
try:
- repository_metadata = suc.get_repository_metadata_by_id( trans, id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
repository_data = repository_metadata.get_api_value( view='element',
value_mapper=default_value_mapper( trans, repository_metadata ) )
repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id )
@@ -91,7 +92,7 @@
"""
repository_metadata_id = kwd.get( 'id', None )
try:
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
flush_needed = False
for key, new_value in payload.items():
if hasattr( repository_metadata, key ):
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -106,7 +106,7 @@
# The received id is a RepositoryMetadata object id, so we need to get the
# associated Repository and redirect to view_or_manage_repository with the
# changeset_revision.
- repository_metadata = suc.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
repository = repository_metadata.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
@@ -196,7 +196,7 @@
ids = util.listify( id )
count = 0
for repository_metadata_id in ids:
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
count += 1
@@ -332,7 +332,7 @@
for repository_metadata in repository.metadata_revisions:
metadata = repository_metadata.metadata
if metadata:
- if suc.is_downloadable( metadata ):
+ if metadata_util.is_downloadable( metadata ):
repository_metadata.downloadable = True
trans.sa_session.add( repository_metadata )
repository.deleted = False
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,10 +11,9 @@
from galaxy.util import json
from galaxy.model.orm import and_, or_
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
-from tool_shed.util import encoding_util
-from galaxy.webapps.tool_shed.util import workflow_util
-from galaxy.webapps.tool_shed.util import common_util
+from tool_shed.util import encoding_util, metadata_util, repository_dependency_util, tool_dependency_util
+from tool_shed.galaxy_install import repository_util
+from galaxy.webapps.tool_shed.util import common_util, workflow_util
import galaxy.tools
import tool_shed.grids.repository_grids as repository_grids
import tool_shed.grids.util as grids_util
@@ -85,7 +84,7 @@
operation = kwd[ 'operation' ].lower()
# The received id is a RepositoryMetadata id.
repository_metadata_id = kwd[ 'id' ]
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
new_kwd = dict( id=repository_id,
@@ -266,7 +265,7 @@
operation = kwd[ 'operation' ].lower()
# The received id is a RepositoryMetadata id.
repository_metadata_id = kwd[ 'id' ]
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
new_kwd = dict( id=repository_id,
@@ -287,7 +286,7 @@
operation = kwd[ 'operation' ].lower()
# The received id is a RepositoryMetadata id.
repository_metadata_id = kwd['id' ]
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
new_kwd = dict( id=repository_id,
@@ -304,7 +303,7 @@
operation = kwd[ 'operation' ].lower()
# The received id is a RepositoryMetadata id.
repository_metadata_id = kwd[ 'id' ]
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
new_kwd = dict( id=repository_id,
@@ -689,7 +688,7 @@
status = params.get( 'status', 'done' )
repository = suc.get_repository_by_id( trans, repository_id )
if repository:
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
changeset_revision = repository_metadata.changeset_revision
if repository_metadata:
metadata = repository_metadata.metadata
@@ -757,7 +756,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -774,7 +773,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd = {}
@@ -844,7 +843,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -861,7 +860,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd = {}
@@ -1085,14 +1084,15 @@
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
- repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
+ repository_dependencies = \
+ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
if repository_dependencies:
return encoding_util.tool_shed_encode( repository_dependencies )
return ''
@@ -1144,16 +1144,16 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- repository_metadata=repository_metadata,
- tool_dependencies=None,
- repository_dependencies=None )
+ repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
@@ -1257,16 +1257,16 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- repository_metadata=repository_metadata,
- tool_dependencies=None,
- repository_dependencies=None )
+ repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
includes_data_managers = False
includes_datatypes = False
includes_tools = False
@@ -1686,15 +1686,16 @@
if repository_metadata:
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = \
+ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
# Handle messaging for orphan tool dependencies.
- orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata )
if orphan_message:
message += orphan_message
status = 'warning'
@@ -1809,13 +1810,14 @@
repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = \
+ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
if metadata:
if 'repository_dependencies' in metadata and not repository_dependencies:
message += 'The repository dependency definitions for this repository are invalid and will be ignored.'
@@ -2254,7 +2256,7 @@
repository = None
if repository:
repository_id = trans.security.encode_id( repository.id )
- repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
if not repository_metadata:
# Get updates to the received changeset_revision if any exist.
repo_dir = repository.repo_path( trans.app )
@@ -2262,7 +2264,7 @@
upper_bound_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
if upper_bound_changeset_revision:
changeset_revision = upper_bound_changeset_revision
- repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
return trans.response.send_redirect( web.url_for( controller='repository',
action='index',
@@ -2471,15 +2473,16 @@
if repository_metadata:
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = \
+ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None )
# Handle messaging for orphan tool dependencies.
- orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata )
if orphan_message:
message += orphan_message
status = 'warning'
@@ -2621,7 +2624,7 @@
status = params.get( 'status', 'done' )
if workflow_name:
workflow_name = encoding_util.tool_shed_decode( workflow_name )
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
changeset_revision = repository_metadata.changeset_revision
metadata = repository_metadata.metadata
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy import web, util
from galaxy.datatypes import checkers
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
+from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util
from galaxy import eggs
eggs.require('mercurial')
@@ -198,17 +198,17 @@
# so warning messages are important because orphans are always valid. The repository owner must be warned in case they did not intend to define an
# orphan dependency, but simply provided incorrect information (tool shed, name, owner, changeset_revision) for the definition.
# Handle messaging for orphan tool dependencies.
- orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata_dict )
+ orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata_dict )
if orphan_message:
message += orphan_message
status = 'warning'
# Handle messaging for invalid tool dependencies.
- invalid_tool_dependencies_message = suc.generate_message_for_invalid_tool_dependencies( metadata_dict )
+ invalid_tool_dependencies_message = tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict )
if invalid_tool_dependencies_message:
message += invalid_tool_dependencies_message
status = 'error'
# Handle messaging for invalid repository dependencies.
- invalid_repository_dependencies_message = suc.generate_message_for_invalid_repository_dependencies( metadata_dict )
+ invalid_repository_dependencies_message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata_dict )
if invalid_repository_dependencies_message:
message += invalid_repository_dependencies_message
status = 'error'
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/util/workflow_util.py
--- a/lib/galaxy/webapps/tool_shed/util/workflow_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/workflow_util.py
@@ -6,7 +6,7 @@
import logging, svgfig
from galaxy.util import json
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util
+from tool_shed.util import encoding_util, metadata_util
from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
import galaxy.webapps.galaxy.controllers.workflow
import galaxy.tools
@@ -152,7 +152,7 @@
workflow_name = encoding_util.tool_shed_decode( workflow_name )
if trans.webapp.name == 'tool_shed':
# We're in the tool shed.
- repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
metadata = repository_metadata.metadata
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -2,8 +2,8 @@
Classes encapsulating the management of repositories installed from Galaxy tool sheds.
"""
import os, logging
-import tool_shed.util.shed_util
import tool_shed.util.shed_util_common
+import tool_shed.util.datatype_util
from galaxy.model.orm import and_
from galaxy import eggs
@@ -51,13 +51,13 @@
.order_by( self.model.ToolShedRepository.table.c.id ):
relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
if relative_install_dir:
- installed_repository_dict = tool_shed.util.shed_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
+ installed_repository_dict = tool_shed.util.datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
if installed_repository_dict:
self.installed_repository_dicts.append( installed_repository_dict )
def load_proprietary_converters_and_display_applications( self, deactivate=False ):
for installed_repository_dict in self.installed_repository_dicts:
if installed_repository_dict[ 'converter_path' ]:
- tool_shed.util.shed_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
+ tool_shed.util.datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
if installed_repository_dict[ 'display_path' ]:
- tool_shed.util.shed_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate )
+ tool_shed.util.datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate )
\ No newline at end of file
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -3,7 +3,7 @@
from galaxy import model, util
from galaxy.web.framework.helpers import iff, grids
from galaxy.model.orm import or_
-import tool_shed.util.shed_util as shed_util
+from tool_shed.util import tool_dependency_util
log = logging.getLogger( __name__ )
@@ -345,7 +345,7 @@
]
def build_initial_query( self, trans, **kwd ):
- tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd )
+ tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
if tool_dependency_ids:
clause_list = []
for tool_dependency_id in tool_dependency_ids:
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -6,11 +6,9 @@
from galaxy import util
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
-import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
+from tool_shed.util import common_install_util, common_util, datatype_util, metadata_util, tool_dependency_util, tool_util
from galaxy.util.odict import odict
-from tool_shed.util import common_util
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -174,7 +172,7 @@
# See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
if is_displayed:
- tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
+ tool_panel_dict_for_tool_config = tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
else:
@@ -195,43 +193,43 @@
self.app.sa_session.flush()
if 'tool_dependencies' in metadata_dict:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
sample_files = [ str( s ) for s in sample_files ]
- tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
- shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
+ tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
+ repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app,
+ repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app,
self.tool_path,
sample_files,
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
+ tool_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Install tool dependencies.
- shed_util.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ suc.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
- shed_util.add_to_tool_panel( self.app,
+ tool_util.add_to_tool_panel( self.app,
tool_shed_repository.name,
repository_clone_url,
tool_shed_repository.installed_changeset_revision,
@@ -250,16 +248,16 @@
datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
- converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
+ converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
+ name=tool_shed_repository.name,
+ owner=self.repository_owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
@@ -297,7 +295,7 @@
current_changeset_revision=None,
owner=self.repository_owner,
dist_to_shed=True )
- shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
+ suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
@@ -308,9 +306,9 @@
self.app.sa_session.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
- shed_util.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ suc.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
@@ -319,7 +317,7 @@
response.close()
if text:
tool_version_dicts = from_json_string( text )
- shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
+ tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
# CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
@@ -328,8 +326,8 @@
tool_id = tool_dict[ 'guid' ]
old_tool_id = tool_dict[ 'id' ]
tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id )
+ tool_version_using_old_id = tool_util.get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = tool_util.get_tool_version( self.app, tool_id )
if not tool_version_using_old_id:
tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
@@ -341,7 +339,7 @@
self.app.sa_session.add( tool_version_using_guid )
self.app.sa_session.flush()
# Associate the two versions as parent / child.
- tool_version_association = shed_util.get_tool_version_association( self.app,
+ tool_version_association = tool_util.get_tool_version_association( self.app,
tool_version_using_old_id,
tool_version_using_guid )
if not tool_version_association:
@@ -349,7 +347,7 @@
parent_id=tool_version_using_old_id.id )
self.app.sa_session.add( tool_version_association )
self.app.sa_session.flush()
- shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
+ suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
return common_util.get_non_shed_tool_panel_configs( self.app )
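
For readers following the InstallManager changes above: the refactoring only changes which modules the helpers live in, not the order of installation states the repository record moves through. The sketch below is a minimal, hypothetical illustration of that order; update_status stands in for suc.update_tool_shed_repository_status(), the other callables stand in for the corresponding InstallManager steps, and the status labels are the attribute names used in the diff rather than values copied from the Galaxy model.

# Hypothetical sketch only -- not InstallManager code.
def install_repository_sketch( app, repository, update_status, clone, handle_contents, set_tool_versions ):
    # Each state is persisted before the corresponding work is attempted,
    # so a failed installation is left showing the step it stopped at.
    update_status( app, repository, 'CLONING' )
    clone( repository )
    # handle_repository_contents() sets INSTALLING_TOOL_DEPENDENCIES itself
    # when tool dependencies are to be installed.
    handle_contents( repository )
    update_status( app, repository, 'SETTING_TOOL_VERSIONS' )
    set_tool_versions( repository )
    update_status( app, repository, 'INSTALLED' )
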
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -1,89 +1,230 @@
-import tool_shed.util.shed_util as shed_util
+import os, logging, threading, urllib2
+from galaxy.web import url_for
+from galaxy.webapps.tool_shed.util import container_util
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
+from tool_shed.util import encoding_util, repository_dependency_util, tool_dependency_util, tool_util
-def handle_repository_contents( app, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None,
- shed_tool_conf=None, reinstalling=False ):
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
+
+log = logging.getLogger( __name__ )
+
+def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
+ repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
"""
- Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
- when an admin is installing a new repository or reinstalling an uninstalled repository.
+ Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
+ contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
+
+ This method is called from Galaxy under three scenarios:
+ 1. During the tool shed repository installation process via the tool shed's get_repository_information() method. In this case both the received
+ repository and repository_metadata will be objects, but tool_dependencies and repository_dependencies will be None.
+ 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no updates available. In this case, both
+ repository and repository_metadata will be None, but tool_dependencies and repository_dependencies will be objects previously retrieved from the
+ tool shed if the repository includes definitions for them.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates available. In this case, this
+ method is reached via the tool shed's get_updated_repository_information() method, and both repository and repository_metadata will be objects
+ but tool_dependencies and repository_dependencies will be None.
"""
- sa_session = app.model.context.current
- shed_config_dict = app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
- tool_shed_repository.metadata = metadata_dict
- sa_session.add( tool_shed_repository )
- sa_session.flush()
- if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = shed_util.create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True )
- if 'tools' in metadata_dict:
- tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
- sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
- shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
- sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
- repository_tools_tups = suc.get_repository_tools_tups( app, metadata_dict )
- if repository_tools_tups:
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = shed_util.handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( app,
- tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
- # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- shed_util.copy_sample_files( app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
- shed_util.add_to_tool_panel( app=app,
- repository_name=tool_shed_repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- repository_tools_tups=repository_tools_tups,
- owner=tool_shed_repository.owner,
- shed_tool_conf=shed_tool_conf,
- tool_panel_dict=tool_panel_dict,
- new_install=True )
- if 'data_manager' in metadata_dict:
- new_data_managers = shed_util.install_data_managers( app,
- app.config.shed_data_manager_config_file,
- metadata_dict,
- shed_config_dict,
- relative_install_dir,
- tool_shed_repository,
- repository_tools_tups )
- if 'datatypes' in metadata_dict:
- tool_shed_repository.status = app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
- if not tool_shed_repository.includes_datatypes:
- tool_shed_repository.includes_datatypes = True
- sa_session.add( tool_shed_repository )
- sa_session.flush()
- files_dir = relative_install_dir
- if shed_config_dict.get( 'tool_path' ):
- files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
- # Load data types required by tools.
- converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( app, datatypes_config, files_dir, override=False )
- if converter_path or display_path:
- # Create a dictionary of tool shed repository related information.
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
- if converter_path:
- # Load proprietary datatype converters
- app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=repository_dict )
- if display_path:
- # Load proprietary datatype display applications
- app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
+ repo_info_dict = {}
+ repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ if trans.webapp.name == 'tool_shed':
+ # We're in the tool shed.
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ # Get a dictionary of all repositories upon which the contents of the received repository depend.
+ repository_dependencies = \
+ repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ if tool_dependencies:
+ new_tool_dependencies = {}
+ for dependency_key, requirements_dict in tool_dependencies.items():
+ if dependency_key in [ 'set_environment' ]:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ set_environment_dict[ 'repository_name' ] = repository_name
+ set_environment_dict[ 'repository_owner' ] = repository_owner
+ set_environment_dict[ 'changeset_revision' ] = changeset_revision
+ new_set_environment_dict_list.append( set_environment_dict )
+ new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
+ else:
+ requirements_dict[ 'repository_name' ] = repository_name
+ requirements_dict[ 'repository_owner' ] = repository_owner
+ requirements_dict[ 'changeset_revision' ] = changeset_revision
+ new_tool_dependencies[ dependency_key ] = requirements_dict
+ tool_dependencies = new_tool_dependencies
+ # Cast unicode to string.
+ repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
+ str( repository_clone_url ),
+ str( changeset_revision ),
+ str( ctx_rev ),
+ str( repository_owner ),
+ repository_dependencies,
+ tool_dependencies )
+ return repo_info_dict
+
+def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
+ """Return the changeset revision hash to which the repository can be updated."""
+ changeset_revision_dict = {}
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision ) )
+ try:
+ response = urllib2.urlopen( url )
+ encoded_update_dict = response.read()
+ if encoded_update_dict:
+ update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
+ includes_data_managers = update_dict.get( 'includes_data_managers', False )
+ includes_datatypes = update_dict.get( 'includes_datatypes', False )
+ includes_tools = update_dict.get( 'includes_tools', False )
+ includes_tools_for_display_in_tool_panel = update_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tool_dependencies = update_dict.get( 'includes_tool_dependencies', False )
+ includes_workflows = update_dict.get( 'includes_workflows', False )
+ has_repository_dependencies = update_dict.get( 'has_repository_dependencies', False )
+ changeset_revision = update_dict.get( 'changeset_revision', None )
+ ctx_rev = update_dict.get( 'ctx_rev', None )
+ response.close()
+ changeset_revision_dict[ 'includes_data_managers' ] = includes_data_managers
+ changeset_revision_dict[ 'includes_datatypes' ] = includes_datatypes
+ changeset_revision_dict[ 'includes_tools' ] = includes_tools
+ changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = includes_tools_for_display_in_tool_panel
+ changeset_revision_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
+ changeset_revision_dict[ 'includes_workflows' ] = includes_workflows
+ changeset_revision_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+ changeset_revision_dict[ 'changeset_revision' ] = changeset_revision
+ changeset_revision_dict[ 'ctx_rev' ] = ctx_rev
+ except Exception, e:
+ log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
+ changeset_revision_dict[ 'includes_data_managers' ] = False
+ changeset_revision_dict[ 'includes_datatypes' ] = False
+ changeset_revision_dict[ 'includes_tools' ] = False
+ changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = False
+ changeset_revision_dict[ 'includes_tool_dependencies' ] = False
+ changeset_revision_dict[ 'includes_workflows' ] = False
+ changeset_revision_dict[ 'has_repository_dependencies' ] = False
+ changeset_revision_dict[ 'changeset_revision' ] = None
+ changeset_revision_dict[ 'ctx_rev' ] = None
+ return changeset_revision_dict
+
+def merge_containers_dicts_for_new_install( containers_dicts ):
+ """
+ When installing one or more tool shed repositories for the first time, the received list of containers_dicts contains a containers_dict for
+ each repository being installed. Since the repositories are being installed for the first time, all entries are None except the repository
+ dependencies and tool dependencies. The entries for missing dependencies are all None since they have previously been merged into the installed
+ dependencies. This method will merge the dependencies entries into a single container and return it for display.
+ """
+ new_containers_dict = dict( readme_files=None,
+ datatypes=None,
+ missing_repository_dependencies=None,
+ repository_dependencies=None,
+ missing_tool_dependencies=None,
+ tool_dependencies=None,
+ invalid_tools=None,
+ valid_tools=None,
+ workflows=None )
+ if containers_dicts:
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ repository_dependencies_root_folder = None
+ tool_dependencies_root_folder = None
+ # Use a folder id that is hopefully unique (the value below is an arbitrary starting point).
+ folder_id = 867
+ for old_container_dict in containers_dicts:
+ # Merge repository_dependencies.
+ old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ]
+ if old_container_repository_dependencies_root:
+ if repository_dependencies_root_folder is None:
+ repository_dependencies_root_folder = container_util.Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ repository_dependencies_folder = container_util.Folder( id=folder_id,
+ key='merged',
+ label='Repository dependencies',
+ parent=repository_dependencies_root_folder )
+ folder_id += 1
+ # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder.
+ old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ]
+ # Change the folder id so it won't conflict with others being merged.
+ old_container_repository_dependencies_folder.id = folder_id
+ folder_id += 1
+ # Generate the label by retrieving the repository name.
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( old_container_repository_dependencies_folder.key )
+ old_container_repository_dependencies_folder.label = str( name )
+ repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder )
+ # Merge tool_dependencies.
+ old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ]
+ if old_container_tool_dependencies_root:
+ if tool_dependencies_root_folder is None:
+ tool_dependencies_root_folder = container_util.Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ tool_dependencies_folder = container_util.Folder( id=folder_id,
+ key='merged',
+ label='Tool dependencies',
+ parent=tool_dependencies_root_folder )
+ folder_id += 1
+ else:
+ td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ]
+ # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder.
+ old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ]
+ for td in old_container_tool_dependencies_folder.tool_dependencies:
+ if td.listify not in td_list:
+ tool_dependencies_folder.tool_dependencies.append( td )
+ if repository_dependencies_root_folder:
+ repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
+ new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+ if tool_dependencies_root_folder:
+ tool_dependencies_root_folder.folders.append( tool_dependencies_folder )
+ new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+ except Exception, e:
+ log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) )
+ finally:
+ lock.release()
+ return new_containers_dict
+
+def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, missing_repository_dependencies,
+ installed_tool_dependencies, missing_tool_dependencies ):
+ """Return the populated containers for a repository being installed for the first time."""
+ installed_tool_dependencies, missing_tool_dependencies = \
+ tool_dependency_util.populate_tool_dependencies_dicts( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository_installed_tool_dependencies=installed_tool_dependencies,
+ repository_missing_tool_dependencies=missing_tool_dependencies,
+ required_repo_info_dicts=None )
+ # Since we are installing a new repository, most of the repository contents are set to None because we don't yet know what they are.
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
+ new_install=True,
+ reinstalling=False )
+ # Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
+ containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+ # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+ containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict )
+ return containers_dict
+
+def pull_repository( repo, repository_clone_url, ctx_rev ):
+ """Pull changes from a remote repository to a local one."""
+ commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
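
To make the shape of the dictionary returned by create_repo_info_dict() above easier to read, here is a small, hypothetical consumer. The unpacking order follows the 7-tuple assembled at the end of that function; summarize_repo_info_dict() itself is illustrative and not part of the codebase.

def summarize_repo_info_dict( repo_info_dict ):
    # Hypothetical helper: each key is a repository name and each value is
    # the tuple built at the end of create_repo_info_dict() above:
    # ( description, clone_url, changeset_revision, ctx_rev, owner,
    #   repository_dependencies, tool_dependencies )
    for name, repo_info_tuple in repo_info_dict.items():
        description, repository_clone_url, changeset_revision, ctx_rev, \
            repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
        print 'Repository %s revision %s ( ctx rev %s ) owned by %s' % ( name, changeset_revision, ctx_rev, repository_owner )
        if repository_dependencies:
            print '  includes repository dependency definitions'
        if tool_dependencies:
            print '  includes %d tool dependency entries' % len( tool_dependencies )
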
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -1,7 +1,7 @@
import sys, os, subprocess, tempfile, urllib2
import common_util
import fabric_util
-from tool_shed.util import encoding_util
+from tool_shed.util import encoding_util, tool_dependency_util
from galaxy.model.orm import and_
from galaxy.web import url_for
@@ -16,24 +16,6 @@
protocol, base = base_url.split( '://' )
return base.rstrip( '/' )
-def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ):
- # Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
- sa_session = app.model.context.current
- # First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
- if version:
- tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
- else:
- tool_dependency = get_tool_dependency_by_name_type_repository( app, tool_shed_repository, name, type )
- if tool_dependency:
- if set_status:
- tool_dependency.status = status
- else:
- # Create a new tool_dependency record for the tool_shed_repository.
- tool_dependency = app.model.ToolDependency( tool_shed_repository.id, name, version, type, status )
- sa_session.add( tool_dependency )
- sa_session.flush()
- return tool_dependency
-
def create_temporary_tool_dependencies_config( tool_shed_url, name, owner, changeset_revision ):
"""Make a call to the tool shed to get the required repository's tool_dependencies.xml file."""
url = url_join( tool_shed_url,
@@ -95,23 +77,6 @@
return tool_shed_repository
return None
-def get_tool_dependency_by_name_type_repository( app, repository, name, type ):
- sa_session = app.model.context.current
- return sa_session.query( app.model.ToolDependency ) \
- .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
- app.model.ToolDependency.table.c.name == name,
- app.model.ToolDependency.table.c.type == type ) ) \
- .first()
-
-def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
- sa_session = app.model.context.current
- return sa_session.query( app.model.ToolDependency ) \
- .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
- app.model.ToolDependency.table.c.name == name,
- app.model.ToolDependency.table.c.version == version,
- app.model.ToolDependency.table.c.type == type ) ) \
- .first()
-
def get_tool_dependency_install_dir( app, repository_name, repository_owner, repository_changeset_revision, tool_dependency_type, tool_dependency_name,
tool_dependency_version ):
if tool_dependency_type == 'package':
@@ -147,13 +112,13 @@
for package_elem in elem:
if package_elem.tag == 'install':
# Create the tool_dependency record in the database.
- tool_dependency = create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.INSTALLING,
+ set_status=True )
# Get the installation method version from a tag like: <install version="1.0">
package_install_version = package_elem.get( 'version', '1.0' )
if package_install_version == '1.0':
@@ -296,13 +261,13 @@
elif package_elem.tag == 'install':
# <install version="1.0">
package_install_version = package_elem.get( 'version', '1.0' )
- tool_dependency = create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.INSTALLING,
+ set_status=True )
if package_install_version == '1.0':
# Handle tool dependency installation using a fabric method included in the Galaxy framework.
for actions_elem in package_elem:
@@ -324,7 +289,11 @@
# print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path
else:
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
- tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, package_name, package_version, 'package' )
+ tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app,
+ tool_shed_repository,
+ package_name,
+ package_version,
+ 'package' )
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
sa_session.add( tool_dependency )
sa_session.flush()
@@ -550,13 +519,13 @@
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
- tool_dependency = create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=env_var_name,
- version=None,
- type='set_environment',
- status=app.model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=env_var_name,
+ version=None,
+ type='set_environment',
+ status=app.model.ToolDependency.installation_status.INSTALLING,
+ set_status=True )
cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
if env_var_version == '1.0':
# Handle setting environment variables using a fabric method.
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/update_manager.py
--- a/lib/tool_shed/galaxy_install/update_manager.py
+++ b/lib/tool_shed/galaxy_install/update_manager.py
@@ -3,7 +3,6 @@
"""
import threading, urllib2, logging
from galaxy.util import string_as_bool
-import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
from galaxy.model.orm import and_
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/grids/repository_grids.py
--- a/lib/tool_shed/grids/repository_grids.py
+++ b/lib/tool_shed/grids/repository_grids.py
@@ -5,6 +5,7 @@
from galaxy.util import json
import tool_shed.util.shed_util_common as suc
import tool_shed.grids.util as grids_util
+from tool_shed.util import metadata_util
from galaxy import eggs
eggs.require('markupsafe')
@@ -776,12 +777,16 @@
required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
if required_repository:
required_repository_id = trans.security.encode_id( required_repository.id )
- required_repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, required_repository_id, changeset_revision )
+ required_repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans,
+ required_repository_id,
+ changeset_revision )
if not required_repository_metadata:
repo_dir = required_repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
updated_changeset_revision = suc.get_next_downloadable_changeset_revision( required_repository, repo, changeset_revision )
- required_repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, required_repository_id, updated_changeset_revision )
+ required_repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans,
+ required_repository_id,
+ updated_changeset_revision )
required_repository_metadata_id = trans.security.encode_id( required_repository_metadata.id )
rd_str += '<a href="browse_repository_dependencies?operation=view_or_manage_repository&id=%s">' % ( required_repository_metadata_id )
rd_str += 'Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>' % ( escape_html( rd_tup[ 1 ] ), escape_html( rd_tup[ 3 ] ), escape_html( rd_tup[ 2 ] ) )
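
The same lookup-with-fallback pattern appears both here and in the repository controller changes earlier in this diff: when no metadata record exists for the requested changeset revision, the code advances to the next downloadable revision and retries. A condensed, hypothetical sketch of that pattern, assuming a tool shed environment where the helpers used in these modules are importable:

# Hypothetical condensation of the pattern used above.
from mercurial import hg
import tool_shed.util.shed_util_common as suc
from tool_shed.util import metadata_util

def get_metadata_with_fallback( trans, repository, changeset_revision ):
    repository_id = trans.security.encode_id( repository.id )
    repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
    if not repository_metadata:
        # No record for this revision, so try the next downloadable one.
        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
        next_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
        if next_changeset_revision:
            repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, next_changeset_revision )
    return repository_metadata
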
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/grids/repository_review_grids.py
--- a/lib/tool_shed/grids/repository_review_grids.py
+++ b/lib/tool_shed/grids/repository_review_grids.py
@@ -4,7 +4,7 @@
from galaxy.model.orm import and_, or_
from tool_shed.grids.repository_grids import RepositoryGrid
import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
+from tool_shed.util import metadata_util
from galaxy import eggs
eggs.require('mercurial')
@@ -73,7 +73,7 @@
class WithoutReviewsRevisionColumn( grids.GridColumn ):
def get_value( self, trans, grid, repository ):
# Restrict the options to revisions that have not yet been reviewed.
- repository_metadata_revisions = suc.get_repository_metadata_revisions_for_review( repository, reviewed=False )
+ repository_metadata_revisions = metadata_util.get_repository_metadata_revisions_for_review( repository, reviewed=False )
if repository_metadata_revisions:
rval = ''
for repository_metadata in repository_metadata_revisions:
This diff is so big that we needed to truncate the remainder.
commit/galaxy-central: inithello: Also require and pass-through authentication for the 'pushkey' mercurial command when pushing changes to a tool shed repository.
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/62bd6fe0e9aa/
changeset: 62bd6fe0e9aa
user: inithello
date: 2013-03-15 20:11:04
summary: Also require and pass-through authentication for the 'pushkey' mercurial command when pushing changes to a tool shed repository.
affected #: 1 file
diff -r 750222a21eff68b96069d853a4744d6e3682508c -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
--- a/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
@@ -56,12 +56,12 @@
times_downloaded += 1
connection.execute( "update repository set times_downloaded = %d where user_id = %d and name = '%s'" % ( times_downloaded, user_id, name.lower() ) )
connection.close()
- if cmd == 'unbundle':
+ if cmd in [ 'unbundle', 'pushkey' ]:
# This is an hg push from the command line. When doing this, the following commands, in order,
# will be retrieved from environ (see the docs at http://mercurial.selenic.com/wiki/WireProtocol)
- # # If mercurial version >= '2.2.3': capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
+ # # If mercurial version >= '2.2.3': capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey -> listkeys
#
- # The mercurial API unbundle() ( i.e., hg push ) method ultimately requires authorization.
+ # The mercurial API unbundle() ( i.e., hg push ) and pushkey() methods ultimately require authorization.
# We'll force password entry every time a change set is pushed.
#
# When a user executes hg commit, it is not guaranteed to succeed. Mercurial records your name
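
As a rough, hypothetical illustration of the change above (not the actual middleware code): both wire-protocol commands involved in an hg push are now treated as requiring authentication.

# Hypothetical sketch: commands issued during 'hg push' that should force an
# authentication challenge.  unbundle() transfers the pushed changesets and
# pushkey() follows it (see the command sequence in the comment above).
COMMANDS_REQUIRING_AUTH = [ 'unbundle', 'pushkey' ]

def cmd_requires_authentication( cmd ):
    return cmd in COMMANDS_REQUIRING_AUTH
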
commit/galaxy-central: guerler: Fixed: No confirm on delete permanently (footer button), saved histories
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/750222a21eff/
changeset: 750222a21eff
user: guerler
date: 2013-03-15 19:32:30
summary: Fixed: No confirm on delete permanently (footer button), saved histories
affected #: 3 files
diff -r f1bd817063e092711a9b13f60e569abbce877fba -r 750222a21eff68b96069d853a4744d6e3682508c static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -541,4 +541,28 @@
}
}
init_grid_elements();
+}
+
+
+// confirmation/submission of operation request
+function submit_operation(selected_button, confirmation_text)
+{
+ // verify that at least one item is selected
+ var number_of_checked_ids = $('input[name="id"]:checked').length;
+ if (!number_of_checked_ids > 0)
+ return false;
+
+ // show confirmation box
+ if (confirmation_text != 'None' && confirmation_text != '')
+ if(!confirm(confirmation_text))
+ return false;
+
+ // set up hidden field to pass the command/operation to the controller
+ $('#operation').val(selected_button.value);
+
+ // submit form
+ selected_button.form.submit();
+
+ // return
+ return true;
}
\ No newline at end of file
diff -r f1bd817063e092711a9b13f60e569abbce877fba -r 750222a21eff68b96069d853a4744d6e3682508c static/scripts/packed/galaxy.grids.js
--- a/static/scripts/packed/galaxy.grids.js
+++ b/static/scripts/packed/galaxy.grids.js
@@ -1,1 +1,1 @@
-jQuery.ajaxSettings.traditional=true;var Grid=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(a){return _.indexOf(this.attributes.async_ops,a)!==-1},add_filter:function(e,f,b){if(b){var c=this.attributes.filters[e],a;if(c===null||c===undefined){a=f}else{if(typeof(c)=="string"){if(c=="All"){a=f}else{var d=[];d[0]=c;d[1]=f;a=d}}else{a=c;a.push(f)}}this.attributes.filters[e]=a}else{this.attributes.filters[e]=f}},remove_filter:function(b,e){var a=this.attributes.filters[b];if(a===null||a===undefined){return false}var d=true;if(typeof(a)==="string"){if(a=="All"){d=false}else{delete this.attributes.filters[b]}}else{var c=_.indexOf(a,e);if(c!==-1){a.splice(c,1)}else{d=false}}return d},get_url_data:function(){var a={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes};if(this.attributes.operation){a.operation=this.attributes.operation}if(this.attributes.item_ids){a.id=this.attributes.item_ids}var b=this;_.each(_.keys(b.attributes.filters),function(c){a["f-"+c]=b.attributes.filters[c]});return a}});function init_operation_buttons(){$("input[name=operation]:submit").each(function(){$(this).click(function(){var b=$(this).val();var a=[];$("input[name=id]:checked").each(function(){a.push($(this).val())});do_operation(b,a)})})}function init_grid_controls(){init_operation_buttons();$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});$(".sort-link").each(function(){$(this).click(function(){set_sort_condition($(this).attr("sort_key"));return false})});$(".page-link > a").each(function(){$(this).click(function(){set_page($(this).attr("page_num"));return false})});$(".categorical-filter > a").each(function(){$(this).click(function(){set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var d=$(this).attr("column_key");var c=$("#input-"+d+"-filter");var e=c.val();c.val("");add_filter_condition(d,e,true);return false})});var a=$("#input-tags-filter");if(a.length){a.autocomplete(history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var b=$("#input-name-filter");if(b.length){b.autocomplete(history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})}function init_grid_elements(){$(".grid").each(function(){var b=$(this).find("input.grid-row-select-checkbox");var a=$(this).find("span.grid-selected-count");var c=function(){a.text($(b).filter(":checked").length)};$(b).each(function(){$(this).change(c)});c()});$(".label").each(function(){var a=$(this).attr("href");if(a!==undefined&&a.indexOf("operation=")!=-1){$(this).click(function(){do_operation_from_href($(this).attr("href"));return false})}});$(".community_rating_star").rating({});make_popup_menus()}function go_page_one(){var a=grid.get("cur_page");if(a!==null&&a!==undefined&&a!=="all"){grid.set("cur_page",1)}}function add_filter_condition(c,e,a){if(e===""){return false}grid.add_filter(c,e,a);var d=$("<span>"+e+"<a 
href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");d.addClass("text-filter-val");d.click(function(){grid.remove_filter(c,e);$(this).remove();go_page_one();update_grid()});var b=$("#"+c+"-filtering-criteria");b.append(d);go_page_one();update_grid()}function add_tag_to_grid_filter(c,b){var a=c+(b!==undefined&&b!==""?":"+b:"");$("#advanced-search").show("fast");add_filter_condition("tags",a,true)}function set_sort_condition(f){var e=grid.get("sort_key");var d=f;if(e.indexOf(f)!==-1){if(e.substring(0,1)!=="-"){d="-"+f}else{}}$(".sort-arrow").remove();var c=(d.substring(0,1)=="-")?"↑":"↓";var a=$("<span>"+c+"</span>").addClass("sort-arrow");var b=$("#"+f+"-header");b.append(a);grid.set("sort_key",d);go_page_one();update_grid()}function set_categorical_filter(b,d){var a=grid.get("categorical_filters")[b],c=grid.get("filters")[b];$("."+b+"-filter").each(function(){var h=$.trim($(this).text());var f=a[h];var g=f[b];if(g==d){$(this).empty();$(this).addClass("current-filter");$(this).append(h)}else{if(g==c){$(this).empty();var e=$("<a href='#'>"+h+"</a>");e.click(function(){set_categorical_filter(b,g)});$(this).removeClass("current-filter");$(this).append(e)}}});grid.add_filter(b,d);go_page_one();update_grid()}function set_page(a){$(".page-link").each(function(){var g=$(this).attr("id"),e=parseInt(g.split("-")[2],10),c=grid.get("cur_page"),f;if(e===a){f=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(f)}else{if(e===c){f=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var d=$("<a href='#'>"+f+"</a>");d.click(function(){set_page(e)});$(this).append(d)}}});var b=true;if(a==="all"){grid.set("cur_page",a);b=false}else{grid.set("cur_page",parseInt(a,10))}update_grid(b)}function do_operation(b,a){b=b.toLowerCase();grid.set({operation:b,item_ids:a});if(grid.can_async_op(b)){update_grid(true)}else{go_to_URL()}}function do_operation_from_href(c){var f=c.split("?");if(f.length>1){var a=f[1];var e=a.split("&");var b=null;var g=-1;for(var d=0;d<e.length;d++){if(e[d].indexOf("operation")!=-1){b=e[d].split("=")[1]}else{if(e[d].indexOf("id")!=-1){g=e[d].split("=")[1]}}}do_operation(b,g);return false}}function go_to_URL(){grid.set("async",false);window.location=grid.get("url_base")+"?"+$.param(grid.get_url_data())}function update_grid(a){if(!grid.get("async")){go_to_URL();return}var b=(grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();$.ajax({type:b,url:grid.get("url_base"),data:grid.get_url_data(),error:function(){alert("Grid refresh failed")},success:function(d){var c=d.split("*****");$("#grid-table-body").html(c[0]);$("#grid-table-footer").html(c[1]);$("#grid-table-body").trigger("update");init_grid_elements();init_operation_buttons();make_popup_menus();$(".loading-elt-overlay").hide();var e=$.trim(c[2]);if(e!==""){$("#grid-message").html(e).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){grid.set({operation:undefined,item_ids:undefined})}})}function check_all_items(){var a=document.getElementById("check_all"),b=document.getElementsByTagName("input"),d=0,c;if(a.checked===true){for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=true;d++}}}else{for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=false}}}init_grid_elements()};
\ No newline at end of file
+jQuery.ajaxSettings.traditional=true;var Grid=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(a){return _.indexOf(this.attributes.async_ops,a)!==-1},add_filter:function(e,f,b){if(b){var c=this.attributes.filters[e],a;if(c===null||c===undefined){a=f}else{if(typeof(c)=="string"){if(c=="All"){a=f}else{var d=[];d[0]=c;d[1]=f;a=d}}else{a=c;a.push(f)}}this.attributes.filters[e]=a}else{this.attributes.filters[e]=f}},remove_filter:function(b,e){var a=this.attributes.filters[b];if(a===null||a===undefined){return false}var d=true;if(typeof(a)==="string"){if(a=="All"){d=false}else{delete this.attributes.filters[b]}}else{var c=_.indexOf(a,e);if(c!==-1){a.splice(c,1)}else{d=false}}return d},get_url_data:function(){var a={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes};if(this.attributes.operation){a.operation=this.attributes.operation}if(this.attributes.item_ids){a.id=this.attributes.item_ids}var b=this;_.each(_.keys(b.attributes.filters),function(c){a["f-"+c]=b.attributes.filters[c]});return a}});function init_operation_buttons(){$("input[name=operation]:submit").each(function(){$(this).click(function(){var b=$(this).val();var a=[];$("input[name=id]:checked").each(function(){a.push($(this).val())});do_operation(b,a)})})}function init_grid_controls(){init_operation_buttons();$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});$(".sort-link").each(function(){$(this).click(function(){set_sort_condition($(this).attr("sort_key"));return false})});$(".page-link > a").each(function(){$(this).click(function(){set_page($(this).attr("page_num"));return false})});$(".categorical-filter > a").each(function(){$(this).click(function(){set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var d=$(this).attr("column_key");var c=$("#input-"+d+"-filter");var e=c.val();c.val("");add_filter_condition(d,e,true);return false})});var a=$("#input-tags-filter");if(a.length){a.autocomplete(history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var b=$("#input-name-filter");if(b.length){b.autocomplete(history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})}function init_grid_elements(){$(".grid").each(function(){var b=$(this).find("input.grid-row-select-checkbox");var a=$(this).find("span.grid-selected-count");var c=function(){a.text($(b).filter(":checked").length)};$(b).each(function(){$(this).change(c)});c()});$(".label").each(function(){var a=$(this).attr("href");if(a!==undefined&&a.indexOf("operation=")!=-1){$(this).click(function(){do_operation_from_href($(this).attr("href"));return false})}});$(".community_rating_star").rating({});make_popup_menus()}function go_page_one(){var a=grid.get("cur_page");if(a!==null&&a!==undefined&&a!=="all"){grid.set("cur_page",1)}}function add_filter_condition(c,e,a){if(e===""){return false}grid.add_filter(c,e,a);var d=$("<span>"+e+"<a 
href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");d.addClass("text-filter-val");d.click(function(){grid.remove_filter(c,e);$(this).remove();go_page_one();update_grid()});var b=$("#"+c+"-filtering-criteria");b.append(d);go_page_one();update_grid()}function add_tag_to_grid_filter(c,b){var a=c+(b!==undefined&&b!==""?":"+b:"");$("#advanced-search").show("fast");add_filter_condition("tags",a,true)}function set_sort_condition(f){var e=grid.get("sort_key");var d=f;if(e.indexOf(f)!==-1){if(e.substring(0,1)!=="-"){d="-"+f}else{}}$(".sort-arrow").remove();var c=(d.substring(0,1)=="-")?"↑":"↓";var a=$("<span>"+c+"</span>").addClass("sort-arrow");var b=$("#"+f+"-header");b.append(a);grid.set("sort_key",d);go_page_one();update_grid()}function set_categorical_filter(b,d){var a=grid.get("categorical_filters")[b],c=grid.get("filters")[b];$("."+b+"-filter").each(function(){var h=$.trim($(this).text());var f=a[h];var g=f[b];if(g==d){$(this).empty();$(this).addClass("current-filter");$(this).append(h)}else{if(g==c){$(this).empty();var e=$("<a href='#'>"+h+"</a>");e.click(function(){set_categorical_filter(b,g)});$(this).removeClass("current-filter");$(this).append(e)}}});grid.add_filter(b,d);go_page_one();update_grid()}function set_page(a){$(".page-link").each(function(){var g=$(this).attr("id"),e=parseInt(g.split("-")[2],10),c=grid.get("cur_page"),f;if(e===a){f=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(f)}else{if(e===c){f=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var d=$("<a href='#'>"+f+"</a>");d.click(function(){set_page(e)});$(this).append(d)}}});var b=true;if(a==="all"){grid.set("cur_page",a);b=false}else{grid.set("cur_page",parseInt(a,10))}update_grid(b)}function do_operation(b,a){b=b.toLowerCase();grid.set({operation:b,item_ids:a});if(grid.can_async_op(b)){update_grid(true)}else{go_to_URL()}}function do_operation_from_href(c){var f=c.split("?");if(f.length>1){var a=f[1];var e=a.split("&");var b=null;var g=-1;for(var d=0;d<e.length;d++){if(e[d].indexOf("operation")!=-1){b=e[d].split("=")[1]}else{if(e[d].indexOf("id")!=-1){g=e[d].split("=")[1]}}}do_operation(b,g);return false}}function go_to_URL(){grid.set("async",false);window.location=grid.get("url_base")+"?"+$.param(grid.get_url_data())}function update_grid(a){if(!grid.get("async")){go_to_URL();return}var b=(grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();$.ajax({type:b,url:grid.get("url_base"),data:grid.get_url_data(),error:function(){alert("Grid refresh failed")},success:function(d){var c=d.split("*****");$("#grid-table-body").html(c[0]);$("#grid-table-footer").html(c[1]);$("#grid-table-body").trigger("update");init_grid_elements();init_operation_buttons();make_popup_menus();$(".loading-elt-overlay").hide();var e=$.trim(c[2]);if(e!==""){$("#grid-message").html(e).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){grid.set({operation:undefined,item_ids:undefined})}})}function check_all_items(){var a=document.getElementById("check_all"),b=document.getElementsByTagName("input"),d=0,c;if(a.checked===true){for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=true;d++}}}else{for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=false}}}init_grid_elements()}function submit_operation(b,c){var a=$('input[name="id"]:checked').length;if(!a>0){return false}if(c!="None"&&c!=""){if(!confirm(c)){return false}}$("#operation").val(b.value);b.form.submit();return true};
\ No newline at end of file
diff -r f1bd817063e092711a9b13f60e569abbce877fba -r 750222a21eff68b96069d853a4744d6e3682508c templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -459,12 +459,14 @@
## Grid operations for multiple items.
%if show_item_checkboxes:
<tr>
+ ## place holder for multiple operation commands
+ <input type="hidden" id="operation" name="operation" value=""><td></td><td colspan="100">
For <span class="grid-selected-count"></span> selected ${items_plural}:
%for operation in grid.operations:
%if operation.allow_multiple:
- <input type="submit" name="operation" value="${operation.label}" class="action-button">
+ <input type="button" value="${operation.label}" class="action-button" onclick="submit_operation(this, '${operation.confirm}')">
%endif
%endfor
</td>
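Unpacked for readability, the only functional addition in the re-added packed grid script above is the submit_operation helper it now ends with; the hidden #operation input and the type="button" operation buttons in the template hunk above are what call it. Roughly (variable names are expanded here for clarity and are not the packed originals):

function submit_operation( operationButton, confirmationMsg ){
    // require at least one checked item row before doing anything
    var checkedCount = $( 'input[name="id"]:checked' ).length;
    if( checkedCount === 0 ){ return false; }
    // if the operation defines a confirmation message, ask the user first
    if( confirmationMsg != "None" && confirmationMsg != "" ){
        if( !confirm( confirmationMsg ) ){ return false; }
    }
    // copy the clicked button's label into the hidden #operation field and submit its form
    $( '#operation' ).val( operationButton.value );
    operationButton.form.submit();
    return true;
}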
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: browser tests: cleanup, unify datasource format, add more documentation
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f1bd817063e0/
changeset: f1bd817063e0
user: carlfeberhard
date: 2013-03-13 23:58:42
summary: browser tests: cleanup, unify datasource format, add more documentation
affected #: 7 files
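The "unify datasource format" part of this changeset is visible throughout the diffs below: hard-coded selectors such as '.bs-tooltip' and 'input#renaming-active', and lookups on spaceghost.selectors / spaceghost.labels, are all replaced with lookups on a single spaceghost.data namespace. A rough sketch of the shape the rewritten tests assume follows; the concrete values live in the SpaceGhost test modules rather than in this diff, so the literals here are placeholders only:

// illustrative sketch only -- the real values are defined in the test modules, not here
spaceghost.data = {
    selectors : {
        tooltipBalloon    : '.bs-tooltip',             // formerly a per-test literal
        editableText      : 'editable-text',           // class name checked with assertHasClass
        editableTextInput : 'input#renaming-active',
        frames            : { history : 'galaxy_history' /* , tools, main, ... */ }
    },
    labels : { /* masthead and tool-panel labels, looked up the same way */ }
};

// so a frame switch that used to read this.selectors.frames.history becomes:
spaceghost.withFrame( spaceghost.data.selectors.frames.history, function(){ /* ... */ });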
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/anon-history-tests.js
--- a/test/casperjs/anon-history-tests.js
+++ b/test/casperjs/anon-history-tests.js
@@ -39,12 +39,9 @@
spaceghost.info( 'Will use fixtureData.testUser: ' + email );
}
-var tooltipSelector = '.bs-tooltip',
-
- editableTextClass = 'editable-text',
- editableTextInputSelector = 'input#renaming-active',
-
- galaxyCookieName = 'galaxysession';
+var tooltipSelector = spaceghost.data.selectors.tooltipBalloon,
+ editableTextClass = spaceghost.data.selectors.editableText,
+ editableTextInput = spaceghost.data.selectors.editableTextInput,
unnamedName = spaceghost.historypanel.data.text.history.newName,
nameSelector = spaceghost.historypanel.data.selectors.history.name,
@@ -57,6 +54,7 @@
anonNameTooltip = spaceghost.historypanel.data.text.anonymous.tooltips.name;
var historyFrameInfo = {},
+ filepathToUpload = '../../test-data/1.txt',
testUploadInfo = {};
@@ -69,18 +67,6 @@
if( loggedInAs ){ this.logout(); }
});
-// ------------------------------------------------------------------- check anon cookies
-spaceghost.then( function testAnonCookies(){
- this.test.comment( 'session cookie for anon-user should be present and well formed' );
- var cookies = this.page.cookies;
- this.debug( this.jsonStr( this.page.cookies ) );
- //??: what are 'well formed' values?
- this.test.assert( cookies.length === 1, "Has one cookie" );
- var galaxyCookie = cookies[0];
- this.test.assert( galaxyCookie.name === galaxyCookieName, "Cookie named: " + galaxyCookieName );
- this.test.assert( !galaxyCookie.secure, "Cookie.secure is false" );
-});
-
// ------------------------------------------------------------------- check the empty history for well formedness
// grab the history frame bounds for mouse later tests
spaceghost.then( function(){
@@ -90,7 +76,7 @@
spaceghost.thenOpen( spaceghost.baseUrl, function testPanelStructure(){
this.test.comment( 'history panel for anonymous user, new history' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( "frame should have proper url and title: 'History'" );
this.test.assertMatch( this.getCurrentUrl(), /\/history/, 'Found history frame url' );
this.test.assertTitle( this.getTitle(), 'History', 'Found history frame title' );
@@ -118,33 +104,29 @@
this.test.comment( 'name should have a tooltip with info on anon-user name editing' );
// mouse over to find tooltip
- //NOTE!!: bounds are returned relative to containing frame - need to adjust using historyFrameInfo
- //TODO: into conv. fn
- var nameInfo = this.getElementInfo( nameSelector );
- //this.debug( 'nameInfo:' + this.jsonStr( nameInfo ) );
- this.page.sendEvent( 'mousemove',
- historyFrameInfo.x + nameInfo.x + 1, historyFrameInfo.y + nameInfo.y + 1 );
- this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
- this.test.assertSelectorHasText( tooltipSelector, anonNameTooltip );
+ this.historypanel.hoverOver( nameSelector, function testingHover(){
+ this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
+ this.test.assertSelectorHasText( tooltipSelector, anonNameTooltip );
+ }, historyFrameInfo );
this.test.comment( 'name should NOT be editable when clicked by anon-user' );
- this.test.assert( nameInfo.attributes[ 'class' ].indexOf( editableTextClass ) === -1,
- "Name field is not class for editable text" );
+ this.assertDoesntHaveClass( nameSelector, editableTextClass,
+ "Name field is not classed as editable text" );
this.click( nameSelector );
- this.test.assertDoesntExist( editableTextInputSelector, "Clicking on name does not create an input" );
+ this.test.assertDoesntExist( editableTextInput, "Clicking on name does not create an input" );
});
});
// ------------------------------------------------------------------- anon user can upload file
spaceghost.then( function testAnonUpload(){
this.test.comment( 'anon-user should be able to upload files' );
- spaceghost.tools.uploadFile( '../../test-data/1.txt', function uploadCallback( _uploadInfo ){
+ spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
this.debug( 'uploaded HDA info: ' + this.jsonStr( _uploadInfo ) );
var hasHda = _uploadInfo.hdaElement,
hasClass = _uploadInfo.hdaElement.attributes[ 'class' ],
hasOkClass = _uploadInfo.hdaElement.attributes[ 'class' ].indexOf( 'historyItem-ok' ) !== -1;
this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.name );
- uploadInfo = _uploadInfo;
+ testUploadInfo = _uploadInfo;
});
});
spaceghost.then( function testAnonUpload(){
@@ -157,18 +139,17 @@
// ------------------------------------------------------------------- anon user registers/logs in -> same history
spaceghost.user.loginOrRegisterUser( email, password );
-//??: why is a reload needed here? If we don't, loggedInAs === '' ...
spaceghost.thenOpen( spaceghost.baseUrl, function(){
this.test.comment( 'anon-user should login and be associated with previous history' );
var loggedInAs = spaceghost.user.loggedInAs();
this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
- this.withFrame( this.selectors.frames.history, function(){
- var hdaInfo = this.historypanel.hdaElementInfoByTitle( uploadInfo.name, uploadInfo.hid );
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ var hdaInfo = this.historypanel.hdaElementInfoByTitle( testUploadInfo.name, testUploadInfo.hid );
this.test.assert( hdaInfo !== null, "After logging in - found a matching hda by name and hid" );
if( hdaInfo ){
- this.test.assert( uploadInfo.hdaElement.attributes.id === hdaInfo.attributes.id,
+ this.test.assert( testUploadInfo.hdaElement.attributes.id === hdaInfo.attributes.id,
"After logging in - found a matching hda by hda view id: " + hdaInfo.attributes.id );
}
});
@@ -177,8 +158,7 @@
spaceghost.user.logout();
spaceghost.thenOpen( spaceghost.baseUrl, function(){
this.test.comment( 'logging out should create a new, anonymous history' );
-
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
'Message contains "' + emptyMsgStr + '"' );
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/hda-state-tests.js
--- a/test/casperjs/hda-state-tests.js
+++ b/test/casperjs/hda-state-tests.js
@@ -40,7 +40,7 @@
spaceghost.info( 'Will use fixtureData.testUser: ' + email );
}
-var tooltipSelector = '.bs-tooltip';
+var tooltipSelector = spaceghost.data.selectors.tooltipBalloon;
var utils = require( 'utils' ),
historyFrameInfo = {},
@@ -70,8 +70,7 @@
});
-// =================================================================== TESTS
-// ------------------------------------------------------------------- helpers
+// =================================================================== TEST HELPERS
//NOTE: to be called with fn.call( spaceghost, ... )
function testTitle( hdaSelector, hid, name ){
@@ -238,19 +237,18 @@
testPeek.call( this, hdaSelector, peekShouldBeArray );
}
+// =================================================================== TESTS
// ------------------------------------------------------------------- ok state
spaceghost.then( function checkOkState(){
this.test.comment( 'HDAs in the "ok" state should be well formed' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.test.assertVisible( uploadSelector, 'HDA is visible' );
this.test.comment( 'should have the proper state class' );
- var okStateClass = this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
- uploadElement = this.getElementInfo( uploadSelector );
- this.test.assert( uploadElement.attributes['class'].indexOf( okStateClass ) !== -1,
- 'HDA has "ok" state class' );
+ this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
+ 'HDA has ok state class' );
// since we're using css there's no great way to test state icon (.state-icon is empty)
@@ -273,11 +271,27 @@
});
});
-/*
+// restore to collapsed
+spaceghost.then( function collapseOkState(){
+ this.test.comment( "Collapsing hda in 'ok' state should hide body again" );
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id,
+ hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
+ body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+
+ this.click( hdaTitle );
+ this.wait( 500, function(){
+ this.test.assertNotVisible( body, 'body is not visible' );
+ });
+ });
+});
+
+
+// ------------------------------------------------------------------- new state
spaceghost.then( function checkNewState(){
this.test.comment( 'HDAs in the "new" state should be well formed' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
// set state directly through model
//TODO: not ideal
this.evaluate( function(){
@@ -291,37 +305,45 @@
// should have proper title and hid
testTitle.call( spaceghost, uploadSelector, testUploadInfo.hid, testUploadInfo.name );
- // should have the new state class
- var newStateClass = this.historypanel.data.selectors.hda.wrapper.stateClasses['new'];
- uploadElement = this.getElementInfo( uploadSelector );
- this.test.assert( uploadElement.attributes['class'].indexOf( newStateClass ) !== -1,
+ this.test.comment( 'new HDA should have the new state class' );
+ this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses['new'],
'HDA has new state class' );
- // since we're using css there's no great way to test this
- //var stateIconSelector = uploadSelector + ' .state-icon';
- //this.test.assertVisible( stateIconSelector, 'HDA has proper hid' );
-
- // should NOT have any of the three, main buttons
+ this.test.comment( 'new HDA should NOT have any of the three, main buttons' );
var buttonSelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons + ' a';
this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
- // expand and check the body
- this.click( titleSelector );
+ this.test.comment( 'clicking the title of the new HDA will expand the body' );
+ var hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
+ this.click( hdaTitle );
this.wait( 500, function(){
var bodySelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
var expectedBodyText = 'This is a new dataset';
+ this.test.comment( 'the body should have the text: ' + expectedBodyText );
this.test.assertSelectorHasText( bodySelector, expectedBodyText,
'HDA body has text: ' + expectedBodyText );
// restore to collapsed
- this.click( titleSelector );
+ this.click( hdaTitle );
});
});
});
});
-*/
+// restore state, collapse
+spaceghost.then( function revertStateAndCollapse(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ this.evaluate( function(){
+ return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'ok' );
+ });
+ this.wait( 500, function(){
+ var hdaTitle = '#' + testUploadInfo.hdaElement.attributes.id
+ + ' ' + this.historypanel.data.selectors.hda.title;
+ this.click( hdaTitle );
+ });
+ });
+});
// ===================================================================
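The re-enabled "new state" test above uses a pattern this file repeats: force an HDA into a given state through the client-side Backbone model, then assert that its wrapper element picks up the matching state class. Condensed from the hunks above (the 'new' state is just one example):

spaceghost.withFrame( spaceghost.data.selectors.frames.history, function(){
    // run inside the page: flip the first HDA's state directly on the model
    this.evaluate( function(){
        return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'new' );
    });
    this.wait( 500, function(){
        // then check the wrapper element was re-classed to match
        var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
        this.assertHasClass( uploadSelector,
            this.historypanel.data.selectors.hda.wrapper.stateClasses['new'],
            'HDA has new state class' );
    });
});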
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/history-panel-tests.js
--- a/test/casperjs/history-panel-tests.js
+++ b/test/casperjs/history-panel-tests.js
@@ -38,7 +38,11 @@
}
// selectors and labels
-var nameSelector = spaceghost.historypanel.data.selectors.history.name,
+var tooltipSelector = spaceghost.data.selectors.tooltipBalloon,
+ editableTextClass = spaceghost.data.selectors.editableText,
+ editableTextInput = spaceghost.data.selectors.editableTextInput,
+
+ nameSelector = spaceghost.historypanel.data.selectors.history.name,
subtitleSelector = spaceghost.historypanel.data.selectors.history.subtitle,
unnamedName = spaceghost.historypanel.data.text.history.newName,
initialSizeStr = spaceghost.historypanel.data.text.history.newSize,
@@ -51,14 +55,9 @@
annoAreaSelector = spaceghost.historypanel.data.selectors.history.annoArea,
nameTooltip = spaceghost.historypanel.data.text.history.tooltips.name,
- tooltipSelector = '.bs-tooltip',
-
- editableTextClass = 'editable-text',
- editableTextInputSelector = 'input#renaming-active',
-
- refreshButtonSelector = 'a#history-refresh-button',
- refreshButtonIconSelector = 'span.fa-icon-refresh',
- refreshButtonHref = '/history',
+ refreshButtonSelector = 'a#history-refresh-button',
+ refreshButtonIconSelector = 'span.fa-icon-refresh',
+ refreshButtonHref = '/history',
includeDeletedOptionsLabel = spaceghost.historyoptions.data.labels.options.includeDeleted;
@@ -66,7 +65,7 @@
var newHistoryName = "Test History",
filepathToUpload = '../../test-data/1.txt',
historyFrameInfo = {},
- uploadInfo = {};
+ testUploadInfo = {};
// =================================================================== TESTS
@@ -88,7 +87,7 @@
// ------------------------------------------------------------------- check structure of empty history
spaceghost.thenOpen( spaceghost.baseUrl, function testPanelStructure(){
this.test.comment( 'history panel, new history' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( "frame should have proper url and title: 'History'" );
this.test.assertMatch( this.getCurrentUrl(), /\/history/, 'Found history frame url' );
this.test.assertTitle( this.getTitle(), 'History', 'Found history frame title' );
@@ -98,7 +97,7 @@
this.test.assertVisible( nameSelector, 'History name is visible' );
this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
- this.test.comment( "history subtitle should display size and size should be 0 bytes" );
+ this.test.comment( "history subtitle should display size and size should be: " + initialSizeStr );
this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
this.test.assertSelectorHasText( subtitleSelector, initialSizeStr,
@@ -119,50 +118,46 @@
// ------------------------------------------------------------------- name editing
spaceghost.then( function(){
this.test.comment( 'history panel, editing the history name' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( 'name should have a tooltip with proper info on name editing' );
var nameInfo = this.getElementInfo( nameSelector );
- this.page.sendEvent( 'mousemove',
- historyFrameInfo.x + nameInfo.x + 1, historyFrameInfo.y + nameInfo.y + 1 );
+ this.page.sendEvent( 'mousemove', historyFrameInfo.x + nameInfo.x + 1, historyFrameInfo.y + nameInfo.y + 1 );
this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
this.test.assertSelectorHasText( tooltipSelector, nameTooltip );
this.test.comment( 'name should be create an input when clicked' );
- this.test.assert( nameInfo.attributes[ 'class' ].indexOf( editableTextClass ) !== -1,
- "Name field classed for editable text" );
+ this.assertHasClass( nameSelector, editableTextClass, "Name field classed for editable text" );
this.click( nameSelector );
- this.test.assertExists( editableTextInputSelector, "Clicking on name creates an input" );
+ this.test.assertExists( editableTextInput, "Clicking on name creates an input" );
this.test.comment( 'name should be editable by entering keys and pressing enter' );
//NOTE: casperjs.sendKeys adds a click before and a selector.blur after sending - won't work here
- //TODO: to conv. fn
this.page.sendEvent( 'keypress', newHistoryName );
this.page.sendEvent( 'keypress', this.page.event.key.Enter );
this.wait( 1000, function(){
this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInputSelector, "Input disappears after pressing enter" );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after pressing enter" );
});
});
});
spaceghost.then( function(){
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( 'name should revert if user clicks away while editing' );
this.click( nameSelector );
this.page.sendEvent( 'keypress', "Woodchipper metagenomics, Fargo, ND" );
// click above the name input element
- var inputInfo = this.getElementInfo( editableTextInputSelector );
- this.page.sendEvent( 'mousedown',
- historyFrameInfo.x + inputInfo.x + 1, historyFrameInfo.y + inputInfo.y - 5 );
+ var inputInfo = this.getElementInfo( editableTextInput );
+ this.page.sendEvent( 'mousedown', historyFrameInfo.x + inputInfo.x + 1, historyFrameInfo.y + inputInfo.y - 5 );
this.wait( 1000, function(){
this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInputSelector, "Input disappears after clicking away" );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after clicking away" );
});
});
});
spaceghost.then( function(){
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( 'name should revert if user hits ESC while editing' );
this.click( nameSelector );
this.page.sendEvent( 'keypress', "Arsenic Bacteria" );
@@ -170,7 +165,7 @@
this.page.sendEvent( 'keypress', this.page.event.key.Escape );
this.wait( 1000, function(){
this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInputSelector, "Input disappears after hitting ESC" );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after hitting ESC" );
});
});
});
@@ -185,14 +180,14 @@
hasClass = _uploadInfo.hdaElement.attributes[ 'class' ],
hasOkClass = _uploadInfo.hdaElement.attributes[ 'class' ].indexOf( wrapperOkClassName ) !== -1;
this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.name );
- uploadInfo = _uploadInfo;
+ testUploadInfo = _uploadInfo;
});
});
spaceghost.then( function checkPanelStructure(){
this.test.comment( 'checking structure of non-empty panel' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
this.test.assertExists( nameSelector, nameSelector + ' exists' );
this.test.assertVisible( nameSelector, 'History name is visible' );
@@ -221,8 +216,7 @@
//TODO: check tooltips
spaceghost.then( function openTags(){
this.test.comment( 'tag area should open when the history panel tag icon is clicked' );
- this.withFrame( this.selectors.frames.history, function(){
- this.capture( 'tag-area.png' );
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.mouseEvent( 'click', tagIconSelector );
this.wait( 1000, function(){
this.test.assertVisible( tagAreaSelector, 'Tag area is now displayed' );
@@ -235,7 +229,7 @@
//TODO: check tooltips
spaceghost.then( function openAnnotation(){
this.test.comment( 'annotation area should open when the history panel annotation icon is clicked' );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.mouseEvent( 'click', annoIconSelector );
this.wait( 1000, function(){
this.test.assertVisible( annoAreaSelector, 'Annotation area is now displayed' );
@@ -244,7 +238,7 @@
});
spaceghost.then( function closeAnnotation(){
this.test.comment( 'annotation area should close when the history panel tag icon is clicked again' );
- this.withFrame( this.selectors.frames.history, function bler(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function bler(){
this.mouseEvent( 'click', annoIconSelector );
this.wait( 1000, function(){
this.test.assertNotVisible( annoAreaSelector, 'Tag area is now hidden' );
@@ -296,8 +290,9 @@
spaceghost.then( function(){
this.test.comment( 'deleted hdas shouldn\'t be in the history panel DOM' );
- this.historypanel.deleteHda( '#' + uploadInfo.hdaElement.attributes.id, function(){
- this.test.assertDoesntExist( '#' + uploadInfo.hdaElement.attributes.id, "Deleted HDA is not in the DOM" );
+ this.historypanel.deleteHda( '#' + testUploadInfo.hdaElement.attributes.id, function(){
+ this.test.assertDoesntExist( '#' + testUploadInfo.hdaElement.attributes.id,
+ "Deleted HDA is not in the DOM" );
});
});
@@ -306,11 +301,11 @@
this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' shows deleted datasets' );
this.historyoptions.includeDeleted();
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
- this.test.assertExists( '#' + uploadInfo.hdaElement.attributes.id,
+ this.test.assertExists( '#' + testUploadInfo.hdaElement.attributes.id,
"Deleted HDA is in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
- this.test.assertVisible( '#' + uploadInfo.hdaElement.attributes.id,
+ this.test.assertVisible( '#' + testUploadInfo.hdaElement.attributes.id,
"Deleted HDA is visible again (using history options -> " + includeDeletedOptionsLabel + ")" );
});
});
@@ -320,9 +315,9 @@
this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' (again) re-hides deleted datasets' );
this.historyoptions.includeDeleted();
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
- this.test.assertDoesntExist( '#' + uploadInfo.hdaElement.attributes.id,
+ this.test.assertDoesntExist( '#' + testUploadInfo.hdaElement.attributes.id,
"Deleted HDA is not in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
});
});
@@ -331,10 +326,10 @@
// undelete the uploaded file
spaceghost.then( function(){
this.historyoptions.includeDeleted();
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
//TODO: to conv. fn
- this.click( '#' + uploadInfo.hdaElement.attributes.id
+ this.click( '#' + testUploadInfo.hdaElement.attributes.id
+ ' ' + this.historypanel.data.selectors.history.undeleteLink );
});
});
@@ -348,16 +343,13 @@
// broken in webkit w/ jq 1.7
spaceghost.then( function(){
this.test.comment( 'HDAs can be expanded by clicking on the name' );
- var uploadedSelector = '#' + uploadInfo.hdaElement.attributes.id;
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.click( uploadedSelector + ' .historyItemTitle' );
- this.debug( 'title: ' + this.debugElement( uploadedSelector + ' .historyItemTitle' ) );
- this.debug( 'wrapper: ' + this.debugElement( uploadedSelector ) );
-
this.wait( 1000, function(){
- this.test.assertExists( uploadedSelector + ' .historyItemBody', "Body for uploaded file is found" );
- this.test.assertVisible( uploadedSelector + ' .hda-summary', "hda-summary is visible" );
+ this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is visible" );
});
});
});
@@ -365,13 +357,13 @@
// ------------------------------------------------------------------- expanded hdas are still expanded after a refresh
spaceghost.then( function(){
this.test.comment( 'Expanded hdas are still expanded after a refresh' );
- var uploadedSelector = '#' + uploadInfo.hdaElement.attributes.id;
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.click( refreshButtonSelector );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
- this.test.assertExists( uploadedSelector + ' .historyItemBody', "Body for uploaded file is found" );
- this.test.assertVisible( uploadedSelector + ' .hda-summary', "hda-summary is visible" );
+ this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is visible" );
});
});
// this will break: webkit + jq 1.7
@@ -380,26 +372,27 @@
// ------------------------------------------------------------------- expanded hdas collapse by clicking name again
spaceghost.then( function(){
this.test.comment( 'Expanded hdas collapse by clicking name again' );
- var uploadedSelector = '#' + uploadInfo.hdaElement.attributes.id;
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.withFrame( this.selectors.frames.history, function(){
- this.click( uploadedSelector + ' .historyItemTitle' );
-
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
this.wait( 500, function(){
- this.test.assertNotVisible( uploadedSelector + ' .hda-summary', "hda-summary is not visible" );
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
});
});
});
-// ------------------------------------------------------------------- collapsed hdas are still collapsed after a refresh
+// ------------------------------------------------------------------- collapsed hdas still collapsed after a refresh
spaceghost.then( function(){
- this.test.comment( 'Expanded hdas are still expanded after a refresh' );
- var uploadedSelector = '#' + uploadInfo.hdaElement.attributes.id;
+ this.test.comment( 'collapsed hdas still collapsed after a refresh' );
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.click( refreshButtonSelector );
- this.withFrame( this.selectors.frames.history, function(){
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
- this.test.assertNotVisible( uploadedSelector + ' .hda-summary', "hda-summary is not visible" );
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
});
});
});
@@ -407,19 +400,20 @@
// ------------------------------------------------------------------- history options collapses all expanded hdas
spaceghost.then( function(){
// expand again
- this.withFrame( this.selectors.frames.history, function(){
- this.click( '#' + uploadInfo.hdaElement.attributes.id + ' .historyItemTitle' );
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ this.click( '#' + testUploadInfo.hdaElement.attributes.id + ' ' + this.historypanel.data.selectors.hda.title );
this.wait( 500, function(){});
});
});
spaceghost.then( function(){
this.test.comment( 'History option collapses all expanded hdas' );
- var uploadedSelector = '#' + uploadInfo.hdaElement.attributes.id;
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.historyoptions.collapseExpanded();
this.wait( 500, function(){
- this.withFrame( this.selectors.frames.history, function(){
- this.test.assertNotVisible( uploadedSelector + ' .hda-summary', "hda-summary is not visible" );
+ this.withFrame( spaceghost.data.selectors.frames.history, function(){
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
});
});
});
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/modules/historypanel.js
--- a/test/casperjs/modules/historypanel.js
+++ b/test/casperjs/modules/historypanel.js
@@ -73,7 +73,7 @@
finalStateClass = '.historyItem-' + finalState;
spaceghost.then( function(){
- spaceghost.withFrame( spaceghost.selectors.frames.history, function(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.history, function(){
// save the old time out
var oldWaitTimeout = spaceghost.options.waitTimeout,
@@ -157,7 +157,7 @@
whenDeletedFn = whenDeletedFn || function(){};
var spaceghost = this.spaceghost;
- spaceghost.withFrame( spaceghost.selectors.frames.history, function deletingHda(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.history, function deletingHda(){
//precondition: historyItemWrapper's (hda dom elements) should have an id
// we could use the selector directly, but better if it errors before an attempted delete
var hdaId = spaceghost.getElementInfo( hdaSelector ).attributes.id;
@@ -192,7 +192,7 @@
var spaceghost = this.spaceghost,
historyFrameInfo = spaceghost.getElementInfo( 'iframe[name="galaxy_history"]' );
- spaceghost.withFrame( spaceghost.selectors.frames.history, function expandingHda(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.history, function expandingHda(){
var titleInfo = spaceghost.getElementInfo( hdaSelector + ' .historyItemTitle' );
spaceghost.page.sendEvent( 'mousedown',
historyFrameInfo.x + titleInfo.x + 1, historyFrameInfo.y + titleInfo.y - 5 );
@@ -233,7 +233,7 @@
if( !historyFrameInfo ){
//TODO: move selector to data (use selectors.frames? )
historyFrameInfo = spaceghost.getElementInfo( 'iframe[name="galaxy_history"]' );
- spaceghost.withFrame( spaceghost.selectors.frames.history, function inHistoryPanel(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.history, function inHistoryPanel(){
hoverAndCallback.call( spaceghost, historyFrameInfo, selector, whenHovering );
});
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/modules/tools.js
--- a/test/casperjs/modules/tools.js
+++ b/test/casperjs/modules/tools.js
@@ -22,7 +22,11 @@
return this.spaceghost + '.Tools';
};
+// -------------------------------------------------------------------
+/* TODO:
+ move selectors from sg to here
+*/
// =================================================================== INTERNAL
/** Tests uploading a file.
* NOTE: this version does NOT throw an error on a bad upload.
@@ -35,52 +39,34 @@
uploadInfo = {};
//TODO: check file exists using phantom.fs
//TODO: pull from test data
- uploadInfo[ spaceghost.selectors.tools.upload.fileInput ] = filepath;
+ uploadInfo[ spaceghost.data.selectors.tools.upload.fileInput ] = filepath;
spaceghost.debug( 'uploading file: ' + filepath );
spaceghost.then( function(){
- spaceghost.withFrame( spaceghost.selectors.frames.tools, function(){
- spaceghost.clickLabel( spaceghost.labels.tools.upload.panelLabel );
+ spaceghost.withFrame( spaceghost.data.selectors.frames.tools, function(){
+ spaceghost.clickLabel( spaceghost.data.labels.tools.upload.panelLabel );
});
});
spaceghost.then( function beginUpload(){
- spaceghost.withFrame( spaceghost.selectors.frames.main, function(){
- spaceghost.fill( spaceghost.selectors.tools.general.form, uploadInfo, false );
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function(){
+ spaceghost.fill( spaceghost.data.selectors.tools.general.form, uploadInfo, false );
// the following throws:
// [error] [remote] Failed dispatching clickmouse event on xpath selector: //input[@value="Execute"]:
// PageError: TypeError: 'undefined' is not a function (evaluating '$(spaceghost).formSerialize()')
// ...and yet the upload still seems to work
- spaceghost.click( xpath( spaceghost.selectors.tools.general.executeButton_xpath ) );
+ spaceghost.click( xpath( spaceghost.data.selectors.tools.general.executeButton_xpath ) );
});
});
// debugging
- spaceghost.withFrame( spaceghost.selectors.frames.main, function afterUpload(){
- var messageInfo = spaceghost.elementInfoOrNull( spaceghost.selectors.messages.all );
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function afterUpload(){
+ var messageInfo = spaceghost.elementInfoOrNull( spaceghost.data.selectors.messages.all );
spaceghost.debug( 'post upload message:\n' + spaceghost.jsonStr( messageInfo ) );
});
};
-/** Uploads a file.
- * @param {String} filepath the local filesystem path of the file to upload (absolute (?))
- */
-//Tools.prototype.uploadFile = function uploadFile( filepath ){
-// this._uploadFile( filepath );
-// this.then( function(){
-// this.withFrame( this.selectors.frames.main, function mainAfterUpload(){
-// var messageInfo = this.elementInfoOrNull( this.selectors.messages.all );
-// if( ( !messageInfo )
-// || ( messageInfo.attributes[ 'class' ] !== 'donemessagelarge' )
-// || ( messageInfo.text.indexOf( this.text.upload.success ) === -1 ) ){
-// throw new GalaxyError( 'UploadError: ' + this.jsonStr( messageInfo ) );
-// }
-// });
-// });
-// return this;
-//};
-
/** Parses the hid and name of a newly uploaded file from the tool execution donemessagelarge
* @param {String} doneMsgText the text extracted from the donemessagelarge after a tool execution
*/
@@ -159,9 +145,9 @@
// upload the file erroring if a done message is not displayed, aggregate info about upload
spaceghost.info( 'uploading file: ' + filepath + ' (timeout after ' + timeoutAfterMs + ')' );
this._uploadFile( filepath );
- spaceghost.withFrame( spaceghost.selectors.frames.main, function toolExecuted(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function toolExecuted(){
spaceghost.debug( 'checking for done message' );
- var doneElementInfo = spaceghost.elementInfoOrNull( spaceghost.selectors.messages.donelarge );
+ var doneElementInfo = spaceghost.elementInfoOrNull( spaceghost.data.selectors.messages.donelarge );
if( !doneElementInfo ){
throw new spaceghost.GalaxyError( 'Upload Error: no done message uploading "' + filepath + '"' );
}
@@ -178,7 +164,7 @@
spaceghost.then( function getNewHda(){
spaceghost.debug( 'beginning wait for upload file\'s ok state' );
// get the hda view DOM element from the upload name and hid
- spaceghost.withFrame( spaceghost.selectors.frames.history, function(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.history, function(){
spaceghost.waitForSelector( '#history-name', function(){
var hdaInfo = spaceghost.historypanel.hdaElementInfoByTitle( uploadInfo.name, uploadInfo.hid );
if( hdaInfo === null ){
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/modules/user.js
--- a/test/casperjs/modules/user.js
+++ b/test/casperjs/modules/user.js
@@ -43,19 +43,19 @@
spaceghost.debug( 'registering user:\n' + spaceghost.jsonStr( userInfo ) );
spaceghost.thenOpen( spaceghost.baseUrl, function(){
- spaceghost.clickLabel( spaceghost.labels.masthead.menus.user );
- spaceghost.clickLabel( spaceghost.labels.masthead.userMenu.register );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.register );
- spaceghost.withFrame( spaceghost.selectors.frames.main, function mainBeforeRegister(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainBeforeRegister(){
spaceghost.debug( 'submitting registration... ' + spaceghost.getCurrentUrl() );
- spaceghost.fill( spaceghost.selectors.registrationPage.form, userInfo, false );
+ spaceghost.fill( spaceghost.data.selectors.registrationPage.form, userInfo, false );
// need manual submit (not a normal html form)
- spaceghost.click( xpath( spaceghost.selectors.registrationPage.submit_xpath ) );
+ spaceghost.click( xpath( spaceghost.data.selectors.registrationPage.submit_xpath ) );
});
//// debugging
- //spaceghost.withFrame( spaceghost.selectors.frames.main, function mainAfterRegister(){
- // var messageInfo = spaceghost.getElementInfo( spaceghost.selectors.messages.all );
+ //spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterRegister(){
+ // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
// spaceghost.debug( 'post registration message:\n' + spaceghost.jsonStr( messageInfo ) );
//});
});
@@ -78,21 +78,21 @@
spaceghost.thenOpen( spaceghost.baseUrl, function(){
- spaceghost.clickLabel( spaceghost.labels.masthead.menus.user );
- spaceghost.clickLabel( spaceghost.labels.masthead.userMenu.login );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.login );
- spaceghost.withFrame( spaceghost.selectors.frames.main, function mainBeforeLogin(){
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainBeforeLogin(){
spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') logging in user:\n'
+ spaceghost.jsonStr( loginInfo ) );
- spaceghost.fill( spaceghost.selectors.loginPage.form, loginInfo, false );
- spaceghost.click( xpath( spaceghost.selectors.loginPage.submit_xpath ) );
+ spaceghost.fill( spaceghost.data.selectors.loginPage.form, loginInfo, false );
+ spaceghost.click( xpath( spaceghost.data.selectors.loginPage.submit_xpath ) );
});
//// debugging
- //spaceghost.withFrame( spaceghost.selectors.frames.main, function mainAfterLogin(){
+ //spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterLogin(){
// //TODO: prob. could use a more generalized form of this for url breakdown/checking
- // if( spaceghost.getCurrentUrl().search( spaceghost.selectors.loginPage.url_regex ) != -1 ){
- // var messageInfo = spaceghost.getElementInfo( spaceghost.selectors.messages.all );
+ // if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) != -1 ){
+ // var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
// spaceghost.debug( 'post login message:\n' + spaceghost.jsonStr( messageInfo ) );
// }
//});
@@ -111,8 +111,8 @@
var spaceghost = this.spaceghost;
this._submitRegistration( email, password, username );
spaceghost.then( function(){
- spaceghost.withFrame( spaceghost.selectors.frames.main, function mainAfterRegister(){
- var messageInfo = spaceghost.getElementInfo( spaceghost.selectors.messages.all );
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterRegister(){
+ var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
spaceghost.debug( 'post registration message:\n' + this.jsonStr( messageInfo ) );
if( messageInfo.attributes[ 'class' ] === 'errormessage' ){
@@ -132,9 +132,9 @@
var spaceghost = this.spaceghost;
this._submitLogin( email, password );
spaceghost.then( function(){
- spaceghost.withFrame( spaceghost.selectors.frames.main, function mainAfterLogin(){
- if( spaceghost.getCurrentUrl().search( spaceghost.selectors.loginPage.url_regex ) != -1 ){
- var messageInfo = spaceghost.getElementInfo( spaceghost.selectors.messages.all );
+ spaceghost.withFrame( spaceghost.data.selectors.frames.main, function mainAfterLogin(){
+ if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) != -1 ){
+ var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
if( messageInfo && messageInfo.attributes[ 'class' ] === 'errormessage' ){
throw new spaceghost.GalaxyError( 'LoginError: ' + messageInfo.html );
}
@@ -154,7 +154,8 @@
var spaceghost = this.spaceghost,
userEmail = '';
try {
- var loggedInInfo = spaceghost.getElementInfo( xpath( spaceghost.selectors.masthead.userMenu.userEmail_xpath ) );
+ var loggedInInfo = spaceghost.getElementInfo(
+ xpath( spaceghost.data.selectors.masthead.userMenu.userEmail_xpath ) );
userEmail = loggedInInfo.text;
} catch( err ){
spaceghost.error( err );
@@ -170,8 +171,8 @@
var spaceghost = this.spaceghost;
spaceghost.thenOpen( spaceghost.baseUrl, function(){
//TODO: handle already logged out
- spaceghost.clickLabel( spaceghost.labels.masthead.menus.user );
- spaceghost.clickLabel( spaceghost.labels.masthead.userMenu.logout );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.menus.user );
+ spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.logout );
});
return spaceghost;
};
diff -r c2f310489973fc2ba42e5289ccec3ed88a42aabc -r f1bd817063e092711a9b13f60e569abbce877fba test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -64,11 +64,13 @@
var utils = require( 'utils' );
// ------------------------------------------------------------------- inheritance
-/**
+/** @class An extension of the Casper object with methods and overrides specifically
+ * for interacting with a Galaxy web page.
+ * @augments Casper
*/
function SpaceGhost(){
SpaceGhost.super_.apply( this, arguments );
- this.init.apply( this, arguments );
+ this._init.apply( this, arguments );
}
utils.inherits( SpaceGhost, Casper );
@@ -85,6 +87,9 @@
// ------------------------------------------------------------------- error types
PageError.prototype = new CasperError();
PageError.prototype.constructor = CasperError;
+/** @class Represents a javascript error on the page casper is browsing
+ * (as opposed to an error in the test script).
+ */
function PageError(){
CasperError.apply( this, arguments );
this.name = "PageError";
@@ -93,6 +98,7 @@
GalaxyError.prototype = new CasperError();
GalaxyError.prototype.constructor = CasperError;
+/** @class Thrown when Galaxy has (gracefully?) indicated pilot error. */
function GalaxyError(){
CasperError.apply( this, arguments );
this.name = "GalaxyError";
@@ -101,6 +107,7 @@
AlertError.prototype = new CasperError();
AlertError.prototype.constructor = CasperError;
+/** @class Thrown when Galaxy has displayed a javascript alert. */
function AlertError(){
CasperError.apply( this, arguments );
this.name = "AlertError";
@@ -111,14 +118,17 @@
// ------------------------------------------------------------------- set up
/** More initialization: cli, event handlers, etc.
* @param {Object} options option hash
+ * @private
*/
-SpaceGhost.prototype.init = function init( options ){
+SpaceGhost.prototype._init = function _init( options ){
//console.debug( 'init, options:', JSON.stringify( options, null, 2 ) );
//NOTE: cli will override in-script options
this._setOptionsFromCli();
// save errors for later output (needs to go before process CLI)
+ /** cache of errors that have occurred
+ * @memberOf SpaceGhost */
this.errors = [];
this.on( 'error', function pushErrorToStack( msg, backtrace ){
//this.debug( 'adding error to stack: ' + msg + ', trace:' + JSON.stringify( backtrace, null, 2 ) );
@@ -139,8 +149,9 @@
};
/** Allow CLI arguments to set options if the proper option name is used.
- * @example:
+ * @example
* casperjs myscript.js --verbose=true --logLevel=debug
+ * @private
*/
SpaceGhost.prototype._setOptionsFromCli = function setOptionsFromCli(){
// get and remove any casper options passed on the command line
@@ -154,25 +165,8 @@
};
// ------------------------------------------------------------------- cli args and options
-SpaceGhost.prototype._saveHtmlOnErrorHandler = function _saveHtmlOnErrorHandler( msg, backtrace ){
- // needs to output to a file in GALAXY_SAVE
- //this.debugHTML();
-};
-
-SpaceGhost.prototype._saveTextOnErrorHandler = function _saveTextOnErrorHandler( msg, backtrace ){
- // needs to output to a file in GALAXY_SAVE
- //this.debugPage();
-};
-
-SpaceGhost.prototype._saveScreenOnErrorHandler = function _saveScreenOnErrorHandler( msg, backtrace ){
- // needs to output to a pic in GALAXY_SAVE
- //var filename = ...??
- //?? this.getCurrentUrl(), this.getCurrent
- //this.capture( filename );
-};
-
-
/** Set up any SG specific options passed in on the cli.
+ * @private
*/
SpaceGhost.prototype._processCLIArguments = function _processCLIArguments(){
//this.debug( 'cli: ' + this.jsonStr( this.cli ) );
@@ -262,6 +256,7 @@
};
/** Suppress the normal output from the casper object (echo, errors)
+ * @private
*/
SpaceGhost.prototype._suppressOutput = function _suppressOutput(){
// currently (1.0) the only way to suppress test pass/fail messages
@@ -274,6 +269,7 @@
};
/** Suppress the normal output from the casper object (echo, errors)
+ * @private
*/
SpaceGhost.prototype._redirectOutputToStderr = function _redirectOutputToStderr(){
// currently (1.0) the only way to suppress test pass/fail messages
@@ -288,7 +284,7 @@
this.removeListener( 'error', this.listeners( 'error' )[1] );
};
-/** Outputs logs, test results and errors in a single JSON formatted object.
+/** Outputs logs, test results and errors in a single JSON formatted object to the console.
*/
SpaceGhost.prototype.outputStateAsJson = function outputStateAsJson(){
var returnedJSON = {
@@ -303,18 +299,20 @@
// ------------------------------------------------------------------- event handling
//note: using non-anon fns to allow removal if needed
-// most of these are stubs (w logging) for later expansion
-/** Event handler for failed page loads
+/** 'load failed' Event handler for failed page loads that only records to the log
+ * @private
*/
SpaceGhost.prototype._loadFailedHandler = function _loadFailedHandler( object ){
this.error( 'load.failed: ' + spaceghost.jsonStr( object ) );
//TODO: throw error?
};
-/** Event handler for page errors (js) - throws test scope as PageError
+/** 'page.error' Event handler that re-raises as PageError
* NOTE: this has some special handling for DOM exc 12 which some casper selectors are throwing
* (even tho the selector still works)
+ * @throws {PageError} (with original's msg and backtrace)
+ * @private
*/
SpaceGhost.prototype._pageErrorHandler = function _pageErrorHandler( msg, backtrace ){
// add a page error handler to catch page errors (what we're most interested with here)
@@ -337,7 +335,9 @@
}
};
-/** Event handler for step/casper timeouts - throws PageError
+/** 'timeout' Event handler for step/casper timeouts - raises as PageError
+ * @throws {PageError} Timeout occurred
+ * @private
*/
SpaceGhost.prototype._timeoutHandler = function _timeoutHandler(){
console.debug( 'timeout' );
@@ -345,17 +345,13 @@
throw new PageError( 'Timeout occurred' );
};
-/** Event handler for console messages from the page.
- */
-SpaceGhost.prototype._pageConsoleHandler = function _pageConsoleHandler(){
- // remote.message
- var DELIM = '-';
- this.debug( this + '(page console) "' + Array.prototype.join.call( arguments, DELIM ) + '"' );
-};
-
-/** Event handler for alerts
+/** 'alert' Event handler that raises an AlertError with the alert message
+ * @throws {AlertError} (the alert message)
+ * @private
*/
SpaceGhost.prototype._alertHandler = function _alertHandler( message ){
+ //TODO: this still isn't working well...
+
// casper info level already has outputs these
//this.warning( this + '(page alert)\n"' + message + '"' );
var ALERT_MARKER = '(page alert) ';
@@ -374,13 +370,34 @@
}
};
-/** Event handler for navigation requested (loading of frames, redirects(?))
+/** 'error' Event handler that saves html from the errored page.
+ * @private
*/
-SpaceGhost.prototype._navHandler = function _navHandler( url, navigationType, navigationLocked, isMainFrame ){
- this.debug( 'navigation.requested: ' + url );
+SpaceGhost.prototype._saveHtmlOnErrorHandler = function _saveHtmlOnErrorHandler( msg, backtrace ){
+ // needs to output to a file in GALAXY_SAVE
+ //this.debugHTML();
};
-/** Set up event handlers.
+/** 'error' Event handler that saves text from the errored page.
+ * @private
+ */
+SpaceGhost.prototype._saveTextOnErrorHandler = function _saveTextOnErrorHandler( msg, backtrace ){
+ // needs to output to a file in GALAXY_SAVE
+ //this.debugPage();
+};
+
+/** 'error' Event handler that saves a screenshot of the errored page.
+ * @private
+ */
+SpaceGhost.prototype._saveScreenOnErrorHandler = function _saveScreenOnErrorHandler( msg, backtrace ){
+ // needs to output to a pic in GALAXY_SAVE
+ //var filename = ...??
+ //?? this.getCurrentUrl(), this.getCurrent
+ //this.capture( filename );
+};
+
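These three handlers are still stubs. Purely as an illustration of the direction the TODOs point (the filename scheme here is invented, and a real version would need to honor GALAXY_TEST_SAVE), the screenshot variant could eventually look something like:

SpaceGhost.prototype._saveScreenOnErrorHandler = function _saveScreenOnErrorHandler( msg, backtrace ){
    // hypothetical naming; the real output directory/naming is not decided yet
    var filename = 'error-' + Date.now() + '.png';
    this.capture( filename );
};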
+/** Sets up event handlers.
+ * @private
*/
SpaceGhost.prototype._setUpEventHandlers = function _setUpEventHandlers(){
//console.debug( '_setUpEventHandlers' );
@@ -393,18 +410,17 @@
this.on( 'waitFor.timeout', this._timeoutHandler );
// ........................ page info/debugging
- // these are already displayed at the casper info level
-
- //this.on( 'remote.message', this._pageConsoleHandler );
this.on( 'remote.alert', this._alertHandler );
- // these are already displayed at the casper debug level
- //this.on( 'navigation.requested', this._navHandler );
-
};
// ------------------------------------------------------------------- sub modules
/** Load sub modules (similar to casperjs.test)
+ * @requires User modules/user.js
+ * @requires Tools modules/tools.js
+ * @requires HistoryPanel modules/historypanel.js
+ * @requires HistoryOptions modules/historyoptions.js
+ * @private
*/
SpaceGhost.prototype._loadModules = function _loadModules(){
this.user = require( this.options.scriptDir + 'modules/user' ).create( this );
@@ -416,6 +432,7 @@
// =================================================================== PAGE CONTROL
/** An override of casper.start for additional set up.
* (Currently only used to change viewport)
+ * @see Casper#start
*/
SpaceGhost.prototype.start = function start(){
var returned = Casper.prototype.start.apply( this, arguments );
@@ -423,8 +440,9 @@
return returned;
};
-/** An override of casper.open specifically for Galaxy.
+/** An override of casper.open for additional page control.
* (Currently only used to change language headers)
+ * @see Casper#open
*/
SpaceGhost.prototype.open = function open(){
//TODO: this can be moved to start (I think...?)
@@ -433,13 +451,13 @@
return Casper.prototype.open.apply( this, arguments );
};
-/** An override to provide json output and more informative error codes
+/** An override to provide json output and more informative error codes.
+ * Exits with 2 if a test has failed.
+ * Exits with 1 if some error has occurred.
+ * Exits with 0 if all tests passed.
+ * @see Casper#run run, boy, run (doesn't he fly?)
*/
SpaceGhost.prototype.run = function run( onComplete, time ){
- // wrap the onComplete to:
- // return code 2 on test failure
- // 0 on success
- // (1 on js error - in error handler)
var new_onComplete = function(){
onComplete.call( this );
var returnCode = ( this.test.testResults.failed )?( 2 ):( 0 );
@@ -467,7 +485,6 @@
* @param {Function} catchFn some portion of the correct error msg
*/
SpaceGhost.prototype.tryStepsCatch = function tryStepsCatch( stepsFn, catchFn ){
- //TODO: * @param {Boolean} removeOtherListeners option to remove other listeners while this fires
// create three steps: 1) set up new error handler, 2) try the fn, 3) check for errors and rem. handler
var originalExitOnError,
originalErrorHandlers = [],
@@ -522,11 +539,10 @@
/** Casper has an (undocumented?) skip test feature. This is a conv. wrapper for that.
*/
SpaceGhost.prototype.skipTest = function(){
- //TODO: does this work? seems to...
throw this.test.SKIP_MESSAGE;
};
-/** test helper - within frame, assert selector, and assert text in selector
+/** Test helper - within frame, assert selector, and assert text in selector
* @param {CasperJS selector} selector what element in which to search for the text
* @param {String} text what text to search for
* @param {String} frame frame selector (gen. name) in which to search for selector (defaults to top)
@@ -548,15 +564,17 @@
}
}
-/** test helper - within frame, assert errormessage, and assert text in errormessage
+/** Test helper - within frame, assert errormessage, and assert text in errormessage
* *message is a common UI feedback motif in Galaxy (often displayed in the main panel)
- * @param {String} message what the message should contain
- * @param {String} frame frame selector (gen. name) in which to search for selector (defaults to 'galaxy_main')
- * @param {CasperJS selector} messageSelector what element in which to search for the text (defaults to '.errormessage')
+ * @param {String} message what the message should contain
+ * @param {String} frame frame selector (gen. name) in which to search for selector
+ * (defaults to 'galaxy_main')
+ * @param {CasperJS selector} messageSelector what element in which to search for the text
+ * (defaults to '.errormessage')
*/
SpaceGhost.prototype.assertErrorMessage = function assertSelectorAndTextInFrame( message, frame, messageSelector ){
- messageSelector = messageSelector || this.selectors.messages.error;
- frame = frame || this.selectors.frames.main;
+ messageSelector = messageSelector || this.data.selectors.messages.error;
+ frame = frame || this.data.selectors.frames.main;
this.assertSelectorAndTextInFrame( messageSelector, message, frame );
};
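For reference, a sketch of how this wrapper reads in a test script, relying on the defaults ('galaxy_main' frame, '.errormessage' selector) and the registration error text kept in the data section below:

spaceghost.then( function(){
    this.assertErrorMessage( 'Enter a real email address' );
});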
@@ -610,9 +628,32 @@
this.test.assert( toSearch.indexOf( searchFor ) !== -1, msg );
};
+/** Assert that a given element has a given class.
+ * @param {CasperJS selector} selector what element to test
+ * @param {String} className the class to test for (classes passed in with a leading '.' will have it trimmed)
+ */
+SpaceGhost.prototype.assertHasClass = function assertHasClass( selector, className, msg ){
+ className = ( className[0] == '.' )?( className.slice( 1 ) ):( className );
+ msg = msg || 'selector "' + selector + '" has class: "' + className + '"';
+ var classes = this.getElementAttribute( selector, 'class' );
+ this.test.assert( classes.indexOf( className ) !== -1, msg );
+};
+
+/** Assert that a given element doesn't have a given class.
+ * @param {CasperJS selector} selector what element to test
+ * @param {String} className the class to test for (classes passed in with a leading '.' will have it trimmed)
+ */
+SpaceGhost.prototype.assertDoesntHaveClass = function assertDoesntHaveClass( selector, className, msg ){
+ className = ( className[0] == '.' )?( className.slice( 1 ) ):( className );
+ msg = msg || 'selector "' + selector + '" does not have class: "' + className + '"';
+ var classes = this.getElementAttribute( selector, 'class' );
+ this.test.assert( classes.indexOf( className ) === -1, msg );
+};
+
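A minimal usage sketch for these two assertions; the selector and class names here are illustrative only (note that a leading '.' on the class is trimmed):

spaceghost.then( function(){
    this.assertHasClass( '#some-hda', '.historyItem-ok', 'hda is in the ok state' );
    this.assertDoesntHaveClass( '#some-hda', '.historyItem-new', 'hda is not in the new state' );
});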
// =================================================================== CONVENIENCE
/** Wraps casper.getElementInfo in try, returning null if element not found instead of erroring.
* @param {String} selector css or xpath selector for the element to find
+ * @returns {Object|null} element info if found, null if not
*/
SpaceGhost.prototype.elementInfoOrNull = function elementInfoOrNull( selector ){
var found = null;
@@ -622,8 +663,9 @@
return found;
};
-/** Wraps casper.click in try, returning true if element found and clicked, false if not instead of erroring.
+/** Wraps casper.click in try to prevent error if element isn't found
* @param {String} selector css or xpath selector for the element to find
+ * @returns {Boolean} true if the element was found and clicked, false if not (instead of erroring)
*/
SpaceGhost.prototype.tryClick = function tryClick( selector ){
var done = false;
@@ -634,9 +676,9 @@
return done;
};
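A sketch of how these two convenience wrappers can be used together; the selector is hypothetical:

spaceghost.then( function(){
    if( !this.tryClick( '#optional-close-button' ) ){
        this.debug( 'no close button to click: '
            + this.jsonStr( this.elementInfoOrNull( '#optional-close-button' ) ) );
    }
});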
+// =================================================================== GALAXY CONVENIENCE
-// =================================================================== GALAXY CONVENIENCE
// =================================================================== MISCELAIN
/** Override capture to save to environ: GALAXY_TEST_SAVE (or passed in from CLI)
* @param {String} filename the image filename
@@ -651,6 +693,7 @@
/** Pop all handlers for eventName from casper and return them in order.
* @param {String} eventName the name of the event from which to remove handlers
+ * @returns {Function[]} the array of functions no longer bound to the event
*/
SpaceGhost.prototype.popAllListeners = function popAllListeners( eventName ){
var returnedListeners = this.listeners( eventName );
@@ -660,7 +703,7 @@
/** Add the given list of handler functions to the listener for eventName in order.
* @param {String} eventName the name of the event to which to add handlers
- * @param {Array} handlerArray an array of event handler functions to add
+ * @param {Function[]} handlerArray an array of event handler functions to add
*/
SpaceGhost.prototype.addListeners = function addListeners( eventName, handlerArray ){
for( var i=0; i<handlerArray.length; i++ ){
@@ -668,14 +711,15 @@
}
};
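Together these two helpers support a save/restore pattern around steps that deliberately fire an event - a sketch, not taken from the test suite:

var savedErrorHandlers = [];
spaceghost.then( function(){
    // detach the usual 'error' handlers, keeping them in order
    savedErrorHandlers = this.popAllListeners( 'error' );
});
// ... steps that intentionally raise 'error' run here without the usual handlers ...
spaceghost.then( function(){
    // re-attach them in the original order
    this.addListeners( 'error', savedErrorHandlers );
});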
-/** Send message to stderr
+/** Send message to stderr using the phantom fs module.
+ * @param {String} msg the message to output
*/
SpaceGhost.prototype.stderr = function( msg ){
var fs = require( 'fs' );
fs.write( '/dev/stderr', msg + '\n', 'w' );
};
-// convenience logging funcs
+// ------------------------------------------------------------------- convenience logging funcs
/** log using level = 'debug' and default namespace = 'spaceghost'
*/
SpaceGhost.prototype.debug = function( msg, namespace ){
@@ -690,14 +734,14 @@
this.log( msg, 'info', namespace );
};
-/** log using level = 'info' and default namespace = 'spaceghost'
+/** log using level = 'warning' and default namespace = 'spaceghost'
*/
SpaceGhost.prototype.warning = function( msg, namespace ){
namespace = namespace || 'spaceghost';
this.log( msg, 'warning', namespace );
};
-/** log using level = 'info' and default namespace = 'spaceghost'
+/** log using level = 'error' and default namespace = 'spaceghost'
*/
SpaceGhost.prototype.error = function( msg, namespace ){
namespace = namespace || 'spaceghost';
@@ -707,6 +751,7 @@
/** log despite logLevel settings, unless returnJsonOnly is set
*/
SpaceGhost.prototype.out = function( msg, namespace ){
+ namespace = namespace || 'spaceghost';
if( !this.options.returnJsonOnly ){
console.debug( msg );
}
@@ -718,10 +763,10 @@
return JSON.stringify( obj, null, 2 );
};
-/** output to debug the JSON of the selector (or null if not found)
+/** output the JSON of the selector (or null if not found) to debug level
*/
SpaceGhost.prototype.debugElement = function debugElement( selector ){
- this.debug( this.jsonStr( this.elementInfoOrNull( selector ) ) );
+ this.debug( selector + ':\n' + this.jsonStr( this.elementInfoOrNull( selector ) ) );
};
/** Debug SG itself
@@ -737,21 +782,6 @@
return this.errors[( this.errors.length - 1 )];
};
-/** Get the last error from an assertRaises test (gen. for the message)
- */
-SpaceGhost.prototype.getLastAssertRaisesError = function(){
- // assuming the test passed here...
- var testsThatPassed = this.test.testResults.passes;
- var test = null;
- for( var i=( testsThatPassed.length - 1 ); i>=0; i-- ){
- currTest = testsThatPassed[i];
- if( currTest.type === 'assertRaises' ){
- test = currTest; break;
- }
- }
- return ( ( test && test.values )?( test.values.error ):( undefined ) );
-};
-
/** String representation
*/
SpaceGhost.prototype.toString = function(){
@@ -762,12 +792,105 @@
return 'SpaceGhost(' + currentUrl + ')';
};
+/** Load and parse a JSON file into an object.
+ * @param filepath filepath relative to the current scriptDir
+ * @returns the object parsed
+ */
+SpaceGhost.prototype.loadJSONFile = function loadJSONFile( filepath ){
+ //precondition: filepath is relative to script dir
+ filepath = this.options.scriptDir + filepath;
+ return JSON.parse( require( 'fs' ).read( filepath ) );
+};
+
+/** Write an object to a JSON file as formatted JSON.
+ * @param filepath filepath relative to the current scriptDir
+ * @param object the object to write
+ * @param mode 'w' for a new file, 'a' for append
+ */
+SpaceGhost.prototype.writeJSONFile = function writeJSONFile( filepath, object, mode ){
+ mode = mode || 'w';
+ //precondition: filepath is relative to script dir
+ filepath = this.options.scriptDir + filepath;
+ return require( 'fs' ).write( filepath, this.jsonStr( object ), mode );
+};
+
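A round-trip sketch for the two JSON helpers; the file path is hypothetical and, per the precondition noted above, relative to scriptDir:

var fixture = spaceghost.loadJSONFile( 'test-data/fixture.json' );
fixture.lastRun = new Date().toString();
spaceghost.writeJSONFile( 'test-data/fixture.json', fixture, 'w' );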
// =================================================================== TEST DATA
-// maintain selectors, labels, text here in one central location
+/** General use selectors, labels, and text. Kept here as a central location.
+ */
+SpaceGhost.prototype.data = {
+ selectors : {
+ tooltipBalloon : '.bs-tooltip',
+ editableText : '.editable-text',
+ editableTextInput : 'input#renaming-active',
+ masthead : {
+ userMenu : {
+ userEmail : 'a #user-email',
+ userEmail_xpath : '//a[contains(text(),"Logged in as")]/span["id=#user-email"]'
+ }
+ },
+ frames : {
+ main : 'galaxy_main',
+ tools : 'galaxy_tools',
+ history : 'galaxy_history'
+ },
+ messages : {
+ all : '[class*="message"]',
+ error : '.errormessage',
+ done : '.donemessage',
+ donelarge : '.donemessagelarge'
+ },
+ loginPage : {
+ form : 'form#login',
+ submit_xpath : "//input[@value='Login']",
+ url_regex : /\/user\/login/
+ },
+ registrationPage : {
+ form : 'form#registration',
+ submit_xpath : "//input[@value='Submit']"
+ },
+ tools : {
+ general : {
+ form : 'form#tool_form',
+ executeButton_xpath : '//input[@value="Execute"]'
+ },
+ upload : {
+ fileInput : 'files_0|file_data' // is this general?
+ }
+ }
+ },
+ labels : {
+ masthead : {
+ menus : {
+ user : 'User'
+ },
+ userMenu : {
+ register : 'Register',
+ login : 'Login',
+ logout : 'Logout'
+ }
+ },
+ tools : {
+ upload : {
+ panelLabel : 'Upload File'
+ }
+ }
+ },
+ text : {
+ registrationPage : {
+ badEmailError : 'Enter a real email address'
+ },
+ upload : {
+ success : 'The following job has been successfully added to the queue'
+ }
+ }
+};
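The intent is for tests to read selectors from this object rather than hard-coding them - for example (the assertion itself is illustrative):

spaceghost.then( function(){
    this.withFrame( this.data.selectors.frames.main, function(){
        this.test.assertExists( this.data.selectors.messages.error, 'main panel shows an error message' );
    });
});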
-//TODO: to data
+/*
SpaceGhost.prototype.selectors = {
+ tooltipBalloon : '.bs-tooltip',
+ editableText : '.editable-text',
+ editableTextInput : 'input#renaming-active',
masthead : {
userMenu : {
userEmail : 'a #user-email',
@@ -832,28 +955,14 @@
success : 'The following job has been successfully added to the queue'
}
};
-
-SpaceGhost.prototype.loadJSONFile = function loadJSONFile( filepath ){
- //precondition: filepath is relative to script dir
- filepath = this.options.scriptDir + filepath;
- return JSON.parse( require( 'fs' ).read( filepath ) );
-};
-
-SpaceGhost.prototype.writeJSONFile = function writeJSONFile( filepath, object, mode ){
- mode = mode || 'w';
- //precondition: filepath is relative to script dir
- filepath = this.options.scriptDir + filepath;
- return require( 'fs' ).write( filepath, this.jsonStr( object ), mode );
-};
+*/
// =================================================================== EXPORTS
-/**
- */
exports.SpaceGhost = SpaceGhost;
exports.PageError = PageError;
exports.GalaxyError = GalaxyError;
exports.AlertError = AlertError;
-/**
+/** creation function
*/
exports.create = function create(options) {
"use strict";
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: browser tests: add tests for structure of HDA in the 'ok' state
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c2f310489973/
changeset: c2f310489973
user: carlfeberhard
date: 2013-03-13 21:03:31
summary: browser tests: add tests for structure of HDA in the 'ok' state
affected #: 4 files
diff -r 4827bed265e7563c58346654f5a668f5de6d9d68 -r c2f310489973fc2ba42e5289ccec3ed88a42aabc test/casperjs/hda-state-tests.js
--- a/test/casperjs/hda-state-tests.js
+++ b/test/casperjs/hda-state-tests.js
@@ -40,22 +40,22 @@
spaceghost.info( 'Will use fixtureData.testUser: ' + email );
}
-var newHistoryName = "Test History",
+var tooltipSelector = '.bs-tooltip';
+
+var utils = require( 'utils' ),
historyFrameInfo = {},
filepathToUpload = '../../test-data/1.txt',
- possibleHDAStates = [],
- testUploadInfo = {};
+ testUploadInfo = {},
+ //TODO: get from the api module - that doesn't exist yet
+ summaryShouldBeArray = [ '10 lines', 'format: txt' ],
+ infoShouldBe = 'uploaded txt file',
+ peekShouldBeArray = [];
// ------------------------------------------------------------------- set up
// start a new user
spaceghost.user.loginOrRegisterUser( email, password );
-// ??: why is a reload needed here? If we don't, loggedInAs === '' ...
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- var loggedInAs = spaceghost.user.loggedInAs();
- this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
-});
-// grab the history frame bounds for mouse later tests
+// grab the history frame bounds for later mouse tests
spaceghost.then( function(){
historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
//this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
@@ -69,15 +69,211 @@
});
});
-spaceghost.then( function getHDAStates(){
+
+// =================================================================== TESTS
+// ------------------------------------------------------------------- helpers
+//NOTE: to be called with fn.call( spaceghost, ... )
+
+function testTitle( hdaSelector, hid, name ){
+ var titleSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.title,
+ titleShouldBe = hid + ': ' + name;
+ this.test.assertVisible( titleSelector,
+ 'HDA title is visible' );
+ this.test.assertSelectorHasText( titleSelector, titleShouldBe,
+ 'HDA has proper hid and title' );
+}
+
+function testTitleButtonStructure( hdaSelector, shouldHaveTheseButtons ){
+ // defaults to the current buttons most states should have
+ shouldHaveTheseButtons = shouldHaveTheseButtons || [ 'display', 'edit', 'delete' ];
+
+ var hdaDbId = this.getElementAttribute( hdaSelector, 'id' ).split( '-' )[1],
+ buttonsArea = hdaSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons,
+ buttons = {
+ // this seems backwards -> TODO: move buttonsArea concat into loop below, move this data to historypanel.data
+ display : {
+ nodeName : this.historypanel.data.text.hda.ok.nodeNames.displayButton,
+ selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.displayButton,
+ tooltip : this.historypanel.data.text.hda.ok.tooltips.displayButton,
+ hrefTpl : this.historypanel.data.text.hda.ok.hrefs.displayButton
+ },
+ edit : {
+ nodeName : this.historypanel.data.text.hda.ok.nodeNames.editAttrButton,
+ selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.editAttrButton,
+ tooltip : this.historypanel.data.text.hda.ok.tooltips.editAttrButton,
+ hrefTpl : this.historypanel.data.text.hda.ok.hrefs.editAttrButton
+ },
+ 'delete' : {
+ nodeName : this.historypanel.data.text.hda.ok.nodeNames.deleteButton,
+ selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.deleteButton,
+ tooltip : this.historypanel.data.text.hda.ok.tooltips.deleteButton,
+ hrefTpl : this.historypanel.data.text.hda.ok.hrefs.deleteButton
+ }
+ };
+ this.test.assertVisible( buttonsArea, 'Button area is visible' );
+
+ for( var i=0; i<shouldHaveTheseButtons.length; i++ ){
+ // don't use button names we don't have data for
+ var buttonName = shouldHaveTheseButtons[ i ];
+ if( !buttons.hasOwnProperty( buttonName ) ){ continue; }
+
+ this.test.comment( buttonName + ' should exist, be visible, and well formed' );
+ var button = buttons[ buttonName ];
+ this.debug( 'checking button "' + buttonName + '" on hda "' + hdaDbId + '":\n' + this.jsonStr( button ) );
+ this.test.assertExists( button.selector, buttonName + ' button exists' );
+ this.test.assertVisible( button.selector, buttonName + ' button is visible' );
+
+ var buttonElement = this.getElementInfo( button.selector );
+ this.debug( 'buttonElement:' + this.jsonStr( buttonElement ) );
+
+ // should be an anchor
+ this.test.assert( buttonElement.nodeName === button.nodeName,
+ buttonName + ' is proper node type (' + button.nodeName + '): ' + buttonElement.nodeName );
+
+ // should have a proper href
+ var href = buttonElement.attributes.href,
+ hrefShouldBe = utils.format( button.hrefTpl, hdaDbId );
+ this.assertTextContains( href, hrefShouldBe,
+ buttonName + ' has proper href (' + hrefShouldBe + '): ' + href );
+
+ this.historypanel.hoverOver( button.selector, function testingHover(){
+ var tooltipText = button.tooltip;
+ this.test.assertVisible( tooltipSelector, buttonName + ' button tooltip is visible when hovering' );
+ this.test.assertSelectorHasText( tooltipSelector, tooltipText,
+ buttonName + ' button has tooltip text: "' + tooltipText + '"' );
+ }, historyFrameInfo );
+ }
+}
+
+function testDbkey( hdaSelector, dbkeySetTo ){
+ var dbkeySelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+ + ' ' + this.historypanel.data.selectors.hda.dbkey,
+ unspecifiedDbkeyText = '?',
+ unspecifiedDbkeyNodeName = 'a',
+ specifiedDbkeyNodeName = 'span',
+ editAttrHrefRegex = /\/datasets\/\w+\/edit/;
+
+ this.test.assertExists( dbkeySelector, 'dbkey exists' );
+ this.test.assertVisible( dbkeySelector, 'dbkey is visible' );
+ var dbkey = this.elementInfoOrNull( dbkeySelector );
+ if( !dbkey ){ return; }
+
+ // dbkey is set, check text
+ if( dbkeySetTo ){
+ this.test.comment( '(specified) dbkey should be displayed correctly' );
+ this.test.assertSelectorHasText( dbkeySelector, dbkeySetTo,
+ 'dbkey is specified: ' + dbkey.text );
+ this.test.assert( dbkey.nodeName === specifiedDbkeyNodeName,
+ 'dbkey has proper nodeName (' + specifiedDbkeyNodeName + '):' + dbkey.nodeName );
+
+ // dbkey expected to be not set
+ } else {
+ this.test.comment( '(unspecified) dbkey should be displayed correctly' );
+ this.test.assertSelectorHasText( dbkeySelector, unspecifiedDbkeyText,
+ 'dbkey is not specified: ' + dbkey.text );
+ this.test.assert( dbkey.nodeName === unspecifiedDbkeyNodeName,
+ 'dbkey has proper nodeName (' + unspecifiedDbkeyNodeName + '):' + dbkey.nodeName );
+
+ this.test.comment( '(unspecified) dbkey href should point to edit attributes' );
+ this.test.assertMatch( dbkey.attributes.href, editAttrHrefRegex,
+ 'dbkey has a proper href: ' + dbkey.attributes.href );
+ }
+}
+
+function testPrimaryActionButtons( hdaSelector ){
+ var buttonsSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+ + ' ' + this.historypanel.data.selectors.hda.primaryActionButtons;
+ this.test.comment( 'Primary action buttons div should exist and be visible' );
+ this.test.assertExists( buttonsSelector, 'Primary action buttons div exists' );
+ this.test.assertVisible( buttonsSelector, 'Primary action buttons div is visible' );
+}
+
+function testSecondaryActionButtons( hdaSelector ){
+ var buttonsSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+ + ' ' + this.historypanel.data.selectors.hda.secondaryActionButtons;
+ this.test.comment( 'Secondary action buttons div should exist and be visible' );
+ this.test.assertExists( buttonsSelector, 'Secondary action buttons div exists' );
+ this.test.assertVisible( buttonsSelector, 'Secondary action buttons div is visible' );
+}
+
+function testPeek( hdaSelector, expectedPeekArray ){
+ var peekSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+ + ' ' + this.historypanel.data.selectors.hda.peek;
+ this.test.comment( 'Peek div should exist and be visible' );
+ this.test.assertExists( peekSelector, 'peek exists' );
+ this.test.assertVisible( peekSelector, 'peek is visible' );
+ expectedPeekArray.forEach( function( string, i ){
+ spaceghost.test.assertSelectorHasText( peekSelector, string, 'peek has proper text (' + string + ')' );
+ });
+}
+
+function testExpandedBody( hdaSelector, expectedSummaryTextArray, expectedInfoText, dbkeySetTo ){
+ var body = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body;
+ this.test.assertExists( body, 'body exists' );
+ this.test.assertVisible( body, 'body is visible' );
+
+ //TODO: create api module, match with api history_contents
+
+ this.test.comment( 'Summary should be displayed correctly' );
+ var summary = body + ' ' + this.historypanel.data.selectors.hda.summary;
+ this.test.assertExists( summary, 'summary exists' );
+ this.test.assertVisible( summary, 'summary is visible' );
+ // summary text is broken up by whitespace making it inconv. to test in one go
+ expectedSummaryTextArray.forEach( function( string, i ){
+ spaceghost.test.assertSelectorHasText( summary, string, 'summary has proper text (' + string + ')' );
+ });
+ this.debug( 'summary text: ' + this.fetchText( summary ) );
+
+ testDbkey.call( this, hdaSelector, dbkeySetTo );
+
+ this.test.comment( 'Info should be displayed correctly' );
+ var info = body + ' ' + this.historypanel.data.selectors.hda.info;
+ this.test.assertExists( info, 'info exists' );
+ this.test.assertVisible( info, 'info is visible' );
+ this.test.assertSelectorHasText( info, expectedInfoText,
+ 'info has proper text (' + expectedInfoText + '): ' + this.fetchText( info ) );
+
+ testPrimaryActionButtons.call( this, hdaSelector );
+ testSecondaryActionButtons.call( this, hdaSelector ); //TODO: isAnonymous
+ testPeek.call( this, hdaSelector, peekShouldBeArray );
+}
+
+// ------------------------------------------------------------------- ok state
+spaceghost.then( function checkOkState(){
+ this.test.comment( 'HDAs in the "ok" state should be well formed' );
+
this.withFrame( this.selectors.frames.history, function(){
- var model = this.evaluate( function(){
- return Galaxy.currHistoryPanel.model.hdas.at( 0 ).attributes;
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+ this.test.assertVisible( uploadSelector, 'HDA is visible' );
+
+ this.test.comment( 'should have the proper state class' );
+ var okStateClass = this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
+ uploadElement = this.getElementInfo( uploadSelector );
+ this.test.assert( uploadElement.attributes['class'].indexOf( okStateClass ) !== -1,
+ 'HDA has "ok" state class' );
+
+ // since we're using css there's no great way to test state icon (.state-icon is empty)
+
+ this.test.comment( 'should have proper title and hid' );
+ testTitle.call( spaceghost, uploadSelector, testUploadInfo.hid, testUploadInfo.name );
+
+ this.test.comment( 'should have all of the three, main buttons' );
+ testTitleButtonStructure.call( spaceghost, uploadSelector );
+
+ this.test.comment( 'body is not visible before clicking the hda title' );
+ var body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+ this.test.assertNotVisible( body, 'body is not visible' );
+
+ this.test.comment( 'clicking the hda title should expand its body' );
+ var hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
+ this.click( hdaTitle );
+ this.wait( 500, function(){
+ testExpandedBody.call( spaceghost, uploadSelector, summaryShouldBeArray, infoShouldBe, false );
});
- this.info( 'model:' + this.jsonStr( model ) );
});
});
+/*
spaceghost.then( function checkNewState(){
this.test.comment( 'HDAs in the "new" state should be well formed' );
@@ -93,15 +289,10 @@
this.test.assertVisible( uploadSelector, 'HDA is visible' );
// should have proper title and hid
- var titleSelector = uploadSelector + ' .historyItemTitle';
- this.test.assertVisible( titleSelector, 'HDA title is visible' );
- this.test.assertSelectorHasText( titleSelector, testUploadInfo.name,
- 'HDA has proper title' );
- this.test.assertSelectorHasText( titleSelector, testUploadInfo.hid,
- 'HDA has proper hid' );
+ testTitle.call( spaceghost, uploadSelector, testUploadInfo.hid, testUploadInfo.name );
// should have the new state class
- var newStateClass = 'historyItem-new',
+ var newStateClass = this.historypanel.data.selectors.hda.wrapper.stateClasses['new'];
uploadElement = this.getElementInfo( uploadSelector );
this.test.assert( uploadElement.attributes['class'].indexOf( newStateClass ) !== -1,
'HDA has new state class' );
@@ -111,13 +302,13 @@
//this.test.assertVisible( stateIconSelector, 'HDA has proper hid' );
// should NOT have any of the three, main buttons
- var buttonSelector = uploadSelector + ' .historyItemButtons a';
+ var buttonSelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons + ' a';
this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
// expand and check the body
this.click( titleSelector );
this.wait( 500, function(){
- var bodySelector = uploadSelector + ' .historyItemBody';
+ var bodySelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
var expectedBodyText = 'This is a new dataset';
@@ -130,8 +321,7 @@
});
});
});
-
-// =================================================================== TESTS
+*/
// ===================================================================
diff -r 4827bed265e7563c58346654f5a668f5de6d9d68 -r c2f310489973fc2ba42e5289ccec3ed88a42aabc test/casperjs/history-panel-tests.js
--- a/test/casperjs/history-panel-tests.js
+++ b/test/casperjs/history-panel-tests.js
@@ -79,7 +79,7 @@
this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
});
-// grab the history frame bounds for mouse later tests
+// grab the history frame bounds for later mouse tests
spaceghost.then( function(){
historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
//this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
diff -r 4827bed265e7563c58346654f5a668f5de6d9d68 -r c2f310489973fc2ba42e5289ccec3ed88a42aabc test/casperjs/modules/historypanel.js
--- a/test/casperjs/modules/historypanel.js
+++ b/test/casperjs/modules/historypanel.js
@@ -136,12 +136,12 @@
};
//TODO!: this will break if the hda name has single or double quotes (which are permitted in names)
-/** Find the id of the hda wrapper given the hda title and hid.
+/** Find the DOM id of the hda wrapper given the hda title and hid.
* @param {String} title the title of the hda
* @param {Int} hid (optional) the hid of the hda to look for
* @returns {String|null} DOM id of the historyItemWrapper found, null if not found
*/
-HistoryPanel.prototype.hdaIdByTitle = function hdaIdByTitle( title, hid ){
+HistoryPanel.prototype.hdaElementIdByTitle = function hdaElementIdByTitle( title, hid ){
var elementInfo = this.hdaElementInfoByTitle( title, hid );
return (( elementInfo && elementInfo.attributes && elementInfo.attributes.id )?
( elementInfo.attributes.id ):( null ));
@@ -200,6 +200,49 @@
return spaceghost;
};
+/** Hover over an element in the history panel.
+ * This is re-implemented here because element bounds in iframes are calc'd
+ * relative to the iframe - but mouse coords are not. Capture the iframe
+ * bounds first to re-calc for mouse coords
+ * @param {String} selector a css or xpath selector for an historyItemWrapper
+ * @param {Function} whenHovering a function to call after the hover (will be scoped to spaceghost)
+ * @param {ElementInfo} historyFrameInfo casper ElementInfo for the history iframe (optional)
+ * If undefined, hoverOver will use withFrame first and gather the information itself.
+ * Send in history iframe info if you're already in the frame when calling this. bleh.
+ */
+HistoryPanel.prototype.hoverOver = function hoverOver( selector, whenHovering, historyFrameInfo ){
+ var spaceghost = this.spaceghost;
+
+ // helper function
+ function hoverAndCallback( historyFrameInfo, selector, whenHovering ){
+ // ...this suddenly started working when I upped the viewport size
+ //this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
+ var elementInfo = this.getElementInfo( selector ),
+ newCoords = { x: ( historyFrameInfo.x + elementInfo.x ),
+ y: ( historyFrameInfo.y + elementInfo.y ) };
+ //this.debug( 'elementInfo:' + this.jsonStr( elementInfo ) );
+ //this.debug( 'newCoords:' + this.jsonStr( newCoords ) );
+ this.page.sendEvent( 'mousemove', newCoords.x + 1, newCoords.y + 1 );
+ if( whenHovering ){
+ whenHovering.call( this );
+ }
+ }
+
+ // complicated by iframes
+ // if no history frame info was passed - assume not in history frame already and move into using withFrame
+ if( !historyFrameInfo ){
+ //TODO: move selector to data (use selectors.frames? )
+ historyFrameInfo = spaceghost.getElementInfo( 'iframe[name="galaxy_history"]' );
+ spaceghost.withFrame( spaceghost.selectors.frames.history, function inHistoryPanel(){
+ hoverAndCallback.call( spaceghost, historyFrameInfo, selector, whenHovering );
+ });
+
+ // otherwise, assume we're already 'in' the history frame and use the passed info
+ } else {
+ hoverAndCallback.call( spaceghost, historyFrameInfo, selector, whenHovering );
+ }
+ //return spaceghost;
+};
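This mirrors the call pattern used in hda-state-tests.js above: grab the iframe bounds first, then pass them in when already inside the frame (the button and tooltip selectors below are taken from the data sections in this changeset):

spaceghost.then( function(){
    var frameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
    this.withFrame( 'galaxy_history', function(){
        this.historypanel.hoverOver( '.icon-button.display', function(){
            this.test.assertVisible( '.bs-tooltip', 'tooltip appears on hover' );
        }, frameInfo );
    });
});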
// =================================================================== SELECTORS
//TODO: data is not a very good name
@@ -219,9 +262,22 @@
wrapper : {
stateClasses : {
prefix : 'historyItem-',
- ok : 'historyItem-ok'
+ ok : 'historyItem-ok',
+ 'new' : 'historyItem-new'
}
- }
+ },
+ title : '.historyItemTitle',
+ titleButtons : '.historyItemButtons',
+ displayButton : '.icon-button.display',
+ editAttrButton : '.icon-button.edit',
+ deleteButton : '.icon-button.delete',
+ body : '.historyItemBody',
+ summary : '.hda-summary',
+ dbkey : '.metadata-dbkey',
+ info : '.hda-info',
+ primaryActionButtons : 'div[id^="primary-actions"]',
+ secondaryActionButtons : 'div[id^="secondary-actions"]',
+ peek : 'pre.peek'
}
},
labels : {
@@ -247,6 +303,23 @@
emptyMsg : "Your history is empty. Click 'Get Data' on the left pane to start"
},
hda : {
+ ok: {
+ tooltips : {
+ displayButton : 'Display data in browser',
+ editAttrButton : 'Edit Attributes',
+ deleteButton : 'Delete'
+ },
+ hrefs : {
+ displayButton : '/datasets/%s/display',
+ editAttrButton : '/datasets/%s/edit',
+ deleteButton : '/datasets/%s/delete_async'
+ },
+ nodeNames : {
+ displayButton : 'a',
+ editAttrButton : 'a',
+ deleteButton : 'a'
+ }
+ }
}
}
};
diff -r 4827bed265e7563c58346654f5a668f5de6d9d68 -r c2f310489973fc2ba42e5289ccec3ed88a42aabc test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -136,7 +136,6 @@
this.debug( 'clientScripts:\n' + this.jsonStr( this.options.clientScripts ) );
this._loadModules();
-
};
/** Allow CLI arguments to set options if the proper option name is used.
@@ -415,6 +414,15 @@
};
// =================================================================== PAGE CONTROL
+/** An override of casper.start for additional set up.
+ * (Currently only used to change viewport)
+ */
+SpaceGhost.prototype.start = function start(){
+ var returned = Casper.prototype.start.apply( this, arguments );
+ this.viewport( 1024, 728 );
+ return returned;
+};
+
/** An override of casper.open specifically for Galaxy.
* (Currently only used to change language headers)
*/
@@ -492,18 +500,20 @@
});
};
-/** Override capture to save to environ: GALAXY_TEST_SAVE (or passed in from CLI)
- * @param {String} filename the image filename
+/** Hover over an element.
+ * NOTE: not for use with iframes (main, tool, history) - they need to re-calc
+ * for the iframe bounds and should be implemented in their own modules
+ * @param {String} selector a css or xpath selector for an historyItemWrapper
+ * @param {Function} whenHovering a function to call after the hover (will be scoped to spaceghost)
*/
-SpaceGhost.prototype.capture = function capture( filename, clipRect_or_selector ){
- //TODO: override with saved output dir
- if( clipRect_or_selector && ( !utils.isClipRect( clipRect_or_selector ) ) ){
- this.debug( "USING CAPTURE SELECTOR" );
- return this.captureSelector( filename, clipRect_or_selector );
- }
- return Casper.prototype.capture.apply( this, arguments );
+SpaceGhost.prototype.hoverOver = function hoverOver( selector, whenHovering ){
+ var elementInfo = this.getElementInfo( selector );
+ this.page.sendEvent( 'mousemove', elementInfo.x + 1, elementInfo.y + 1 );
+ whenHovering.call( this );
+ return this;
};
+
// =================================================================== TESTING
//TODO: form fill doesn't work as casperjs would want it - often a button -> controller url
//TODO: saveScreenshot (to GALAXY_TEST_SAVE)
@@ -591,6 +601,15 @@
});
};
+/** Assert that a given string (toSearch) contains some given string (searchFor).
+ * @param {String} toSearch the string to search
+ * @param {String} searchFor the string to search for
+ * @param {String} msg assertion msg to display
+ */
+SpaceGhost.prototype.assertTextContains = function assertTextContains( toSearch, searchFor, msg ){
+ this.test.assert( toSearch.indexOf( searchFor ) !== -1, msg );
+};
+
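A sketch of the intended use - comparing fetched attribute text against an expected fragment (the anchor selector is hypothetical):

spaceghost.then( function(){
    var href = this.getElementAttribute( 'a.display-link', 'href' );
    this.assertTextContains( href, '/datasets/', 'href contains the datasets path' );
});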
// =================================================================== CONVENIENCE
/** Wraps casper.getElementInfo in try, returning null if element not found instead of erroring.
* @param {String} selector css or xpath selector for the element to find
@@ -619,6 +638,17 @@
// =================================================================== GALAXY CONVENIENCE
// =================================================================== MISCELAIN
+/** Override capture to save to environ: GALAXY_TEST_SAVE (or passed in from CLI)
+ * @param {String} filename the image filename
+ */
+SpaceGhost.prototype.capture = function capture( filename, clipRect_or_selector ){
+ //TODO: override with saved output dir
+ if( clipRect_or_selector && ( !utils.isClipRect( clipRect_or_selector ) ) ){
+ return this.captureSelector( filename, clipRect_or_selector );
+ }
+ return Casper.prototype.capture.apply( this, arguments );
+};
+
/** Pop all handlers for eventName from casper and return them in order.
* @param {String} eventName the name of the event from which to remove handlers
*/
@@ -736,7 +766,7 @@
// =================================================================== TEST DATA
// maintain selectors, labels, text here in one central location
-//TODO: to separate file?
+//TODO: to data
SpaceGhost.prototype.selectors = {
masthead : {
userMenu : {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Add a new ~/tool_shed/util/metadata_util.py component to contain all tool shed related repository metadata utility methods.
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4827bed265e7/
changeset: 4827bed265e7
user: greg
date: 2013-03-13 20:37:29
summary: Add a new ~/tool_shed/util/metadata_util.py component to contain all tool shed related repository metadata utility methods.
affected #: 11 files
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -7,6 +7,7 @@
from galaxy.model.orm import or_
import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from tool_shed.util import encoding_util
from galaxy.webapps.tool_shed.util import workflow_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
@@ -380,16 +381,16 @@
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -1462,7 +1463,7 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
+ message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
@@ -1482,16 +1483,16 @@
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- changeset_revision=repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ changeset_revision=repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
suc.update_in_shed_tool_config( trans.app, repository )
@@ -1687,16 +1688,16 @@
if repository.includes_data_managers:
shed_util.remove_from_data_manager( trans.app, repository )
# Update the repository metadata.
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- changeset_revision=latest_changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=True,
- persist=True )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ changeset_revision=latest_changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=True,
+ persist=True )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -3,6 +3,7 @@
from galaxy.web.base.controllers.admin import Admin
from galaxy.util import inflector
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
import tool_shed.grids.admin_grids as admin_grids
from galaxy import eggs
@@ -301,7 +302,7 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
+ message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -1,7 +1,8 @@
import os, logging
from galaxy import web
from galaxy.web.base.controller import BaseUIController
-from tool_shed.util.shed_util_common import get_repository_by_name_and_owner, set_repository_metadata
+from tool_shed.util.shed_util_common import get_repository_by_name_and_owner
+from tool_shed.util.metadata_util import set_repository_metadata
from galaxy import eggs
eggs.require('mercurial')
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,6 +11,7 @@
from galaxy.util import json
from galaxy.model.orm import and_, or_
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from tool_shed.util import encoding_util
from galaxy.webapps.tool_shed.util import workflow_util
from galaxy.webapps.tool_shed.util import common_util
@@ -365,7 +366,7 @@
operation = kwd[ 'operation' ].lower()
if operation == "preview_tools_in_changeset":
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_metadata = suc.get_latest_repository_metadata( trans, repository.id )
+ repository_metadata = metadata_util.get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -1914,7 +1915,7 @@
def reset_all_metadata( self, trans, id, **kwd ):
# This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template.
# It resets all metadata on the complete changelog for a single repository in the tool shed.
- invalid_file_tups, metadata_dict = suc.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
+ invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
repository = suc.get_repository_in_tool_shed( trans, id )
message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
@@ -2082,7 +2083,7 @@
else:
message += 'The selected files were deleted from the repository. '
kwd[ 'message' ] = message
- suc.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
+ metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -3,6 +3,7 @@
from galaxy import web, util
from galaxy.datatypes import checkers
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from galaxy import eggs
eggs.require('mercurial')
@@ -15,6 +16,26 @@
CHUNK_SIZE = 2**20 # 1Mb
class UploadController( BaseUIController ):
+ def check_file_contents_for_email_alerts( self, trans ):
+ """
+ See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
+ checked for inappropriate content.
+ """
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.email_alerts != None ):
+ email_alerts = json.from_json_string( repository.email_alerts )
+ for user_email in email_alerts:
+ if user_email in admin_users:
+ return True
+ return False
+ def check_file_content_for_html_and_images( self, file_path ):
+ message = ''
+ if checkers.check_html( file_path ):
+ message = 'The file "%s" contains HTML content.\n' % str( file_path )
+ elif checkers.check_image( file_path ):
+ message = 'The file "%s" contains image content.\n' % str( file_path )
+ return message
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
@@ -120,9 +141,9 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = suc.check_file_contents( trans )
+ check_contents = self.check_file_contents_for_email_alerts( trans )
if check_contents and os.path.isfile( full_path ):
- content_alert_str = self.__check_file_content( full_path )
+ content_alert_str = self.check_file_content_for_html_and_images( full_path )
else:
content_alert_str = ''
commands.add( repo.ui, repo, full_path )
@@ -166,7 +187,7 @@
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
- suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
+ metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
if repository.metadata_revisions:
# A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
metadata_dict = repository.metadata_revisions[0].metadata
@@ -330,11 +351,11 @@
pass
# See if any admin users have chosen to receive email alerts when a repository is
# updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = suc.check_file_contents( trans )
+ check_contents = self.check_file_contents_for_email_alerts( trans )
for filename_in_archive in filenames_in_archive:
# Check file content to ensure it is appropriate.
if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.__check_file_content( filename_in_archive )
+ content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
commands.add( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
@@ -421,10 +442,3 @@
message = "Uploaded archives cannot contain hgrc files."
return False, message
return True, ''
- def __check_file_content( self, file_path ):
- message = ''
- if checkers.check_html( file_path ):
- message = 'The file "%s" contains HTML content.\n' % str( file_path )
- elif checkers.check_image( file_path ):
- message = 'The file "%s" contains image content.\n' % str( file_path )
- return message
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -8,6 +8,7 @@
from galaxy.util.json import from_json_string, to_json_string
import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from galaxy.util.odict import odict
from tool_shed.util import common_util
@@ -179,16 +180,16 @@
else:
print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
% ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = self.shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=self.app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -1,5 +1,6 @@
import tool_shed.util.shed_util as shed_util
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
def handle_repository_contents( app, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None,
shed_tool_conf=None, reinstalling=False ):
@@ -9,16 +10,16 @@
"""
sa_session = app.model.context.current
shed_config_dict = app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
sa_session.add( tool_shed_repository )
sa_session.flush()
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/tool_shed/grids/repository_review_grids.py
--- a/lib/tool_shed/grids/repository_review_grids.py
+++ b/lib/tool_shed/grids/repository_review_grids.py
@@ -4,6 +4,7 @@
from galaxy.model.orm import and_, or_
from tool_shed.grids.repository_grids import RepositoryGrid
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from galaxy import eggs
eggs.require('mercurial')
@@ -76,9 +77,9 @@
if repository_metadata_revisions:
rval = ''
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans,
- repository_metadata,
- repository=repository )
+ rev, label, changeset_revision = metadata_util.get_rev_label_changeset_revision_from_repository_metadata( trans,
+ repository_metadata,
+ repository=repository )
rval += '<a href="manage_repository_reviews_of_revision?id=%s&changeset_revision=%s">%s</a><br/>' % \
( trans.security.encode_id( repository.id ), changeset_revision, label )
return rval
diff -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 -r 4827bed265e7563c58346654f5a668f5de6d9d68 lib/tool_shed/grids/util.py
--- a/lib/tool_shed/grids/util.py
+++ b/lib/tool_shed/grids/util.py
@@ -1,5 +1,6 @@
import os, logging
import tool_shed.util.shed_util_common as suc
+import tool_shed.util.metadata_util as metadata_util
from galaxy.web.form_builder import SelectField
def build_approved_select_field( trans, name, selected_value=None, for_component=True ):
@@ -43,7 +44,7 @@
# Restrict the options to all revisions that have associated metadata.
repository_metadata_revisions = repository.metadata_revisions
for repository_metadata in repository_metadata_revisions:
- rev, label, changeset_revision = suc.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
+ rev, label, changeset_revision = metadata_util.get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=repository )
changeset_tups.append( ( rev, label, changeset_revision ) )
refresh_on_change_values.append( changeset_revision )
# Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
This diff is so big that we needed to truncate the remainder.
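For readers following the refactor: callers that previously reached metadata generation through shed_util_common ( suc ) now import tool_shed.util.metadata_util directly. The wrapper below is a minimal sketch of the new call pattern, not code from the commit; the wrapper name is invented, while the function names and keyword arguments mirror the hunks above and assume a configured Galaxy app with an installed tool_shed_repository record.

# Minimal sketch (assumed wrapper name); mirrors the call shown in repository_util.py above.
import tool_shed.util.metadata_util as metadata_util

def refresh_installed_repository_metadata( app, tool_shed_repository, repository_clone_url, shed_tool_conf, relative_install_dir ):
    # Regenerate and persist metadata for the installed changeset revision.
    sa_session = app.model.context.current
    shed_config_dict = app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
    metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision(
        app=app,
        repository=tool_shed_repository,
        changeset_revision=tool_shed_repository.changeset_revision,
        repository_clone_url=repository_clone_url,
        shed_config_dict=shed_config_dict,
        relative_install_dir=relative_install_dir,
        repository_files_dir=None,
        resetting_all_metadata_on_repository=False,
        updating_installed_repository=False,
        persist=True )
    tool_shed_repository.metadata = metadata_dict
    sa_session.add( tool_shed_repository )
    sa_session.flush()
    return metadata_dict, invalid_file_tups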
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Comment out description for vcf to summary tree converter tool.
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/185a0ed00d8f/
changeset: 185a0ed00d8f
user: dan
date: 2013-03-13 20:31:06
summary: Comment out description for vcf to summary tree converter tool.
affected #: 1 file
diff -r bdd46627880ce13479c49c065ee118952ba8e2f2 -r 185a0ed00d8f245aa4ec161ba134df38edf19f70 lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.xml
--- a/lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.xml
+++ b/lib/galaxy/datatypes/converters/vcf_to_summary_tree_converter.xml
@@ -1,5 +1,5 @@
<tool id="CONVERTER_vcf_to_summary_tree_0" name="Convert VCF to Summary Tree" version="1.0.0" hidden="true">
- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description>
+ <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
<command interpreter="python">vcf_to_summary_tree_converter.py $input1 $output1</command>
<inputs>
<page>
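With the <description> element commented out, no description node is produced at all when the tool config is parsed, so the placeholder text can no longer surface anywhere. The snippet below is illustrative only and is not Galaxy's tool-config loader; it uses the standard library parser to show the effect.

# Illustrative only: an XML comment removes the element from the parsed tree entirely.
import xml.etree.ElementTree as ET

tool_xml = ( '<tool id="CONVERTER_vcf_to_summary_tree_0" name="Convert VCF to Summary Tree" version="1.0.0" hidden="true">'
             '<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->'
             '<command interpreter="python">vcf_to_summary_tree_converter.py $input1 $output1</command>'
             '</tool>' )
root = ET.fromstring( tool_xml )
assert root.find( 'description' ) is None  # the commented-out description is not parsed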
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Handle Updating Data Managers when installed from Tool Shed.
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bdd46627880c/
changeset: bdd46627880c
user: dan
date: 2013-03-13 20:12:55
summary: Handle Updating Data Managers when installed from Tool Shed.
affected #: 1 file
diff -r 188400f16e5f997f16c2a31db99da55c37909d00 -r bdd46627880ce13479c49c065ee118952ba8e2f2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1683,6 +1683,9 @@
shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
tool_shed = suc.clean_tool_shed_url( tool_shed_url )
+ # Remove old Data Manager entries
+ if repository.includes_data_managers:
+ shed_util.remove_from_data_manager( trans.app, repository )
# Update the repository metadata.
metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1715,6 +1718,11 @@
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=False )
+ # Add new Data Manager entries
+ if 'data_manager' in metadata_dict:
+ new_data_managers = shed_util.install_data_managers( trans.app, trans.app.config.shed_data_manager_config_file, metadata_dict,
+ repository.get_shed_config_dict( trans.app ), os.path.join( relative_install_dir, name ),
+ repository, repository_tools_tups )
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
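The update path now handles Data Managers in two steps: the repository's old entries are removed before its metadata is regenerated, and any Data Managers defined by the new metadata are installed afterwards. A minimal sketch of that sequence follows, assuming the shed_util helpers shown in the diff; the wrapper function itself is invented for illustration, and in the controller the metadata regeneration actually happens between the two calls rather than before them.

# Sketch of the remove-then-reinstall sequence added by this commit (wrapper name assumed).
import os
import tool_shed.util.shed_util as shed_util

def update_repository_data_managers( trans, repository, metadata_dict, relative_install_dir, name, repository_tools_tups ):
    # Remove old Data Manager entries installed for this repository, if any.
    if repository.includes_data_managers:
        shed_util.remove_from_data_manager( trans.app, repository )
    # Add new Data Manager entries defined in the regenerated metadata.
    if 'data_manager' in metadata_dict:
        shed_util.install_data_managers( trans.app,
                                         trans.app.config.shed_data_manager_config_file,
                                         metadata_dict,
                                         repository.get_shed_config_dict( trans.app ),
                                         os.path.join( relative_install_dir, name ),
                                         repository,
                                         repository_tools_tups )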
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix the json import in the new Galaxy biostar controller.
by commits-noreply@bitbucket.org 13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/188400f16e5f/
changeset: 188400f16e5f
user: greg
date: 2013-03-13 19:52:42
summary: Fix the json import in the new Galaxy biostar controller.
affected #: 1 file
diff -r 5a17ab5f5466eaf61e80d6efda90c394c601f4ad -r 188400f16e5f997f16c2a31db99da55c37909d00 lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -5,7 +5,7 @@
from galaxy.web.base.controller import BaseUIController, url_for, error, web
import base64
-import json
+from galaxy.util import json
import hmac
# Biostar requires all keys to be present, so we start with a template
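The controller now pulls in Galaxy's own json wrapper from galaxy.util rather than whatever module is importable as json, presumably to keep it consistent with the rest of the code base (Galaxy's wrapper historically shimmed simplejson). For context, the kind of signed payload the imports above support looks roughly like the following; the snippet is a hypothetical illustration built on the standard library, not the controller's actual code, and the key name, payload fields, and MD5 digest choice are assumptions.

# Hypothetical sketch of a Biostar-style signed payload (not Galaxy's actual controller code).
import base64
import hashlib
import hmac
import json

def sign_payload( payload, shared_key ):
    # Encode the JSON payload, then sign the encoded form with the shared key.
    data = base64.b64encode( json.dumps( payload ).encode( 'utf-8' ) )
    digest = hmac.new( shared_key.encode( 'utf-8' ), data, hashlib.md5 ).hexdigest()
    return data.decode( 'utf-8' ), digest

data, digest = sign_payload( { 'username': 'galaxy-user', 'email': 'user@example.org' }, 'SHARED_SECRET' )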
Repository URL: https://bitbucket.org/galaxy/galaxy-central/