galaxy-commits

commit/galaxy-central: greg: Implement support for handling refined xml definition for installing tool dependencies along with installed tool shed repositories.
by Bitbucket 25 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/20e01e610de0/
changeset: 20e01e610de0
user: greg
date: 2012-06-25 21:46:35
summary: Implement support for handling refined xml definition for installing tool dependencies along with installed tool shed repositories.
affected #: 7 files
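For context, a minimal sketch of the refined tool_dependencies.xml layout that the parsing code below handles. The <install>, <actions>, <action>, and <readme> tags and the action types are taken from this diff; the root tag name, the package, and the download URL are illustrative assumptions:

<tool_dependency>
    <package name="samtools" version="0.1.18">
        <install version="1.0">
            <actions>
                <!-- The first action defines how the package is fetched (hypothetical URL). -->
                <action type="download_by_url">http://example.org/samtools-0.1.18.tar.gz</action>
                <!-- Remaining actions run inside the unpacked directory. -->
                <action type="shell_command">make</action>
                <action type="move_directory_files">
                    <source_directory>bin</source_directory>
                    <destination_directory>$INSTALL_DIR/bin</destination_directory>
                </action>
                <action type="set_environment">
                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
                </action>
            </actions>
        </install>
        <readme>Optional notes; installation skips this tag.</readme>
    </package>
</tool_dependency>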
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -149,6 +149,11 @@
repository_clone_url,
metadata_dict,
dist_to_shed=True )
+ if 'tool_dependencies' in metadata_dict:
+ # All tool_dependency objects must be created before the tools are processed, whether or not tool dependencies will be installed.
+ tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, installed_changeset_revision )
+ else:
+ tool_dependencies = None
if 'tools' in metadata_dict:
work_dir = make_tmp_directory()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
@@ -165,7 +170,7 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
- if install_dependencies and 'tool_dependencies' in metadata_dict:
+ if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( self.app,
'tool_dependencies.xml',
@@ -173,12 +178,14 @@
installed_changeset_revision,
work_dir )
# Install tool dependencies.
- status, message = handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config )
- if status != 'done' and message:
- print 'The following error occurred from the InstallManager while installing tool dependencies:'
- print message
+ installed_tool_dependencies = handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ for installed_tool_dependency in installed_tool_dependencies:
+ if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
+ print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
+ print installed_tool_dependency.error_message, '\n\n'
add_to_tool_panel( self.app,
repository_name,
repository_clone_url,
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -35,6 +35,7 @@
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
def handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=None ):
+ # TODO: This method is deprecated and should be eliminated when the implementation for handling proprietary fabric scripts is implemented.
sa_session = app.model.context.current
cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
output = local( cmd, capture=True )
@@ -44,49 +45,84 @@
tool_dependency.error_message = str( output.stderr )
sa_session.add( tool_dependency )
sa_session.flush()
-def install_and_build_package( app, tool_dependency, params_dict ):
+def install_and_build_package( app, tool_dependency, actions_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
sa_session = app.model.context.current
- install_dir = params_dict[ 'install_dir' ]
- download_url = params_dict.get( 'download_url', None )
- clone_cmd = params_dict.get( 'clone_cmd', None )
- actions = params_dict.get( 'actions', None )
- package_name = params_dict.get( 'package_name', None )
- with make_tmp_dir() as work_dir:
- with lcd( work_dir ):
- if download_url:
- downloaded_filename = os.path.split( download_url )[ -1 ]
- downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, download_url )
- if common_util.istar( downloaded_file_path ):
- common_util.extract_tar( downloaded_file_path, work_dir )
- dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
- else:
- dir = work_dir
- elif clone_cmd:
- output = local( clone_cmd, capture=True )
- log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
- if output.return_code:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( output.stderr )
- sa_session.add( tool_dependency )
- sa_session.flush()
- return
- dir = package_name
- if actions:
- with lcd( dir ):
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- for action_tup in actions:
- action_key, action_dict = action_tup
- if action_key == 'move_directory_files':
+ install_dir = actions_dict[ 'install_dir' ]
+ package_name = actions_dict[ 'package_name' ]
+ #download_url = actions_dict.get( 'download_url', None )
+ #clone_cmd = actions_dict.get( 'clone_cmd', None )
+ actions = actions_dict.get( 'actions', None )
+ if actions:
+ with make_tmp_dir() as work_dir:
+ with lcd( work_dir ):
+ # The first action in the list of actions will be the one that defines the installation process. There
+ # are currently only two supported processes: download_by_url and clone via a "shell_command" action type.
+ action_type, action_dict = actions[ 0 ]
+ if action_type == 'download_by_url':
+ # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
+ url = action_dict[ 'url' ]
+ downloaded_filename = os.path.split( url )[ -1 ]
+ downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, url )
+ if common_util.istar( downloaded_file_path ):
+ common_util.extract_tar( downloaded_file_path, work_dir )
+ dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
+ else:
+ dir = work_dir
+ elif action_type == 'shell_command':
+ # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
+ clone_cmd = action_dict[ 'command' ]
+ output = local( clone_cmd, capture=True )
+ log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
+ if output.return_code:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
+ dir = package_name
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ # The package has been downloaded, so we can now perform all of the actions defined for building it.
+ with lcd( dir ):
+ for action_tup in actions[ 1: ]:
+ action_type, action_dict = action_tup
+ current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+ if action_type == 'move_directory_files':
common_util.move_directory_files( current_dir=current_dir,
source_dir=os.path.join( action_dict[ 'source_directory' ] ),
destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_key == 'move_file':
+ elif action_type == 'move_file':
common_util.move_file( current_dir=current_dir,
source=os.path.join( action_dict[ 'source' ] ),
destination_dir=os.path.join( action_dict[ 'destination' ] ) )
- else:
- action = action_key
+ elif action_type == 'set_environment':
+ # Currently the only action supported in this category is "environment_variable".
+ env_var_dict = action_dict[ 'environment_variable' ]
+ env_var_name = env_var_dict[ 'name' ]
+ env_var_action = env_var_dict[ 'action' ]
+ env_var_value = env_var_dict[ 'value' ]
+ if env_var_action == 'prepend_to':
+ changed_value = '%s:$%s' % ( env_var_value, env_var_name )
+ elif env_var_action == 'set_to':
+ changed_value = '%s' % env_var_value
+ elif env_var_action == 'append_to':
+ changed_value = '$%s:%s' % ( env_var_name, env_var_value )
+ cmd = "echo '%s=%s; export %s' > %s/env.sh;chmod +x %s/env.sh" % ( env_var_name,
+ changed_value,
+ env_var_name,
+ install_dir,
+ install_dir )
+ output = local( cmd, capture=True )
+ log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
+ if output.return_code:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
+ elif action_type == 'shell_command':
+ action = action_dict[ 'command' ]
with settings( warn_only=True ):
output = local( action, capture=True )
log_results( action, output, os.path.join( install_dir, INSTALLATION_LOG ) )
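To make the new set_environment handling concrete, a standalone sketch of how the env.sh command is composed from an environment_variable action; the helper name and the sample values are hypothetical, while the branch logic mirrors install_and_build_package above:

def build_env_sh_command( env_var_name, env_var_action, env_var_value, install_dir ):
    # prepend_to / set_to / append_to, as in the action handler above.
    if env_var_action == 'prepend_to':
        changed_value = '%s:$%s' % ( env_var_value, env_var_name )
    elif env_var_action == 'set_to':
        changed_value = env_var_value
    elif env_var_action == 'append_to':
        changed_value = '$%s:%s' % ( env_var_name, env_var_value )
    # Write an env.sh that tools can source to pick up the dependency.
    return "echo '%s=%s; export %s' > %s/env.sh;chmod +x %s/env.sh" % \
        ( env_var_name, changed_value, env_var_name, install_dir, install_dir )

For example, build_env_sh_command( 'PATH', 'prepend_to', '/deps/samtools/0.1.18/bin', '/deps/samtools/0.1.18' ) yields the command that writes an env.sh prepending the dependency's bin directory to PATH.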
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -51,117 +51,109 @@
install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
if not os.path.exists( install_dir ):
for package_elem in elem:
- if package_elem.tag == 'proprietary_fabfile':
- # TODO: This is not yet working...
- # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
- if not fabric_version_checked:
- check_fabric_version()
- fabric_version_checked = True
- fabfile_name = package_elem.get( 'name', None )
- fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- print 'Installing tool dependencies via fabric script ', fabfile_path
- elif package_elem.tag == 'fabfile':
- # Handle tool dependency installation using a fabric method included in the Galaxy framework.
- fabfile_path = None
- for method_elem in package_elem:
+ if package_elem.tag == 'install':
+ # <install version="1.0">
+ package_install_version = package_elem.get( 'version', '1.0' )
tool_dependency = create_or_update_tool_dependency( app,
tool_shed_repository,
name=package_name,
version=package_version,
type='package',
status=app.model.ToolDependency.installation_status.INSTALLING )
- run_fabric_method( app, tool_dependency, method_elem, fabfile_path, install_dir, package_name=package_name )
- sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
- print package_name, 'version', package_version, 'installed in', install_dir
+ if package_install_version == '1.0':
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+ for actions_elem in package_elem:
+ install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=package_name )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ print package_name, 'version', package_version, 'installed in', install_dir
+ elif package_elem.tag == 'readme':
+ # Nothing to be done.
+ continue
+ #elif package_elem.tag == 'proprietary_fabfile':
+ # # TODO: This is not yet supported or functionally correct...
+ # # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
+ # if not fabric_version_checked:
+ # check_fabric_version()
+ # fabric_version_checked = True
+ # fabfile_name = package_elem.get( 'name', None )
+ # proprietary_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
+ # print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path
else:
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
return tool_dependency
-def run_fabric_method( app, tool_dependency, elem, fabfile_path, install_dir, package_name=None, **kwd ):
- """Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
+def install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=None, proprietary_fabfile_path=None, **kwd ):
+ """Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
sa_session = app.model.context.current
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
- # Default value for env_dependency_path.
- install_path, install_directory = os.path.split( install_dir )
- if install_directory != 'bin':
- env_dependency_path = os.path.join( install_dir, 'bin' )
+ actions_dict = dict( install_dir=install_dir )
+ if package_name:
+ actions_dict[ 'package_name' ] = package_name
+ actions = []
+ for action_elem in actions_elem:
+ action_dict = {}
+ action_type = action_elem.get( 'type', 'shell_command' )
+ if action_type == 'shell_command':
+ # <action type="shell_command">make</action>
+ action_elem_text = action_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if action_elem_text:
+ action_dict[ 'command' ] = action_elem_text
+ else:
+ continue
+ elif action_type == 'download_by_url':
+ # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
+ if action_elem.text:
+ action_dict[ 'url' ] = action_elem.text
+ else:
+ continue
+ elif action_type in [ 'move_directory_files', 'move_file' ]:
+ # <action type="move_file">
+ # <source>misc/some_file</source>
+ # <destination>$INSTALL_DIR/bin</destination>
+ # </action>
+ # <action type="move_directory_files">
+ # <source_directory>bin</source_directory>
+ # <destination_directory>$INSTALL_DIR/bin</destination_directory>
+ # </action>
+ for move_elem in action_elem:
+ move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if move_elem_text:
+ action_dict[ move_elem.tag ] = move_elem_text
+ elif action_type == 'set_environment':
+ # <action type="set_environment">
+ # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+ # </action>
+ for env_elem in action_elem:
+ if env_elem.tag == 'environment_variable':
+ env_var_name = env_elem.get( 'name', 'PATH' )
+ env_var_action = env_elem.get( 'action', 'prepend_to' )
+ env_var_text = env_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if env_var_text:
+ action_dict[ env_elem.tag ] = dict( name=env_var_name, action=env_var_action, value=env_var_text )
+ else:
+ continue
+ actions.append( ( action_type, action_dict ) )
+ if actions:
+ actions_dict[ 'actions' ] = actions
+ if proprietary_fabfile_path:
+ # TODO: this is not yet supported or functional, but when it is handle it using the fabric api.
+ # run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=package_name )
+ raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
else:
- env_dependency_path = install_dir
- method_name = elem.get( 'name', None )
- params_dict = dict( install_dir=install_dir )
- actions = []
- for param_elem in elem:
- param_name = param_elem.get( 'name' )
- if param_name:
- if param_name == 'actions':
- for action_elem in param_elem:
- action_dict = {}
- action_type = action_elem.get( 'type', 'shell_command' )
- if action_type == 'shell_command':
- # Example: <action type="shell_command">make</action>
- action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
- if not action_key:
- continue
- elif action_type in [ 'move_directory_files', 'move_file' ]:
- # Examples:
- # <action type="move_file">
- # <source>misc/some_file</source>
- # <destination>$INSTALL_DIR/bin</destination>
- # </action>
- # <action type="move_directory_files">
- # <source_directory>bin</source_directory>
- # <destination_directory>$INSTALL_DIR/bin</destination_directory>
- # </action>
- action_key = action_type
- for move_elem in action_elem:
- move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
- if move_elem_text:
- action_dict[ move_elem.tag ] = move_elem_text
- else:
- continue
- actions.append( ( action_key, action_dict ) )
- if actions:
- params_dict[ 'actions' ] = actions
- elif param_name == 'env_dependency_path':
- env_dependency_path = param_elem.text.replace( '$INSTALL_DIR', install_dir )
- else:
- if param_elem.text:
- params_dict[ param_name ] = param_elem.text.replace( '$INSTALL_DIR', install_dir )
- if package_name:
- params_dict[ 'package_name' ] = package_name
- if fabfile_path:
- # TODO: Handle this using the fabric api.
- # run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=package_name )
- return 'Tool dependency installation using proprietary fabric scripts is not yet supported. '
- else:
- # There is currently only 1 fabric method, install_and_build_package().
try:
- install_and_build_package( app, tool_dependency, params_dict )
+ # There is currently only one fabric method.
+ install_and_build_package( app, tool_dependency, actions_dict )
except Exception, e:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
tool_dependency.error_message = str( e )
sa_session.add( tool_dependency )
sa_session.flush()
- sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
- try:
- handle_post_build_processing( app,
- tool_dependency,
- install_dir,
- env_dependency_path,
- package_name=package_name )
- except Exception, e:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( e )
- sa_session.add( tool_dependency )
- sa_session.flush()
- sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
sa_session.add( tool_dependency )
sa_session.flush()
-def run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=None, **kwd ):
+def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
@@ -190,10 +182,10 @@
else:
params_str = params_str.rstrip( ',' )
try:
- cmd = 'fab -f %s %s:%s' % ( fabfile_path, method_name, params_str )
+ cmd = 'fab -f %s %s:%s' % ( proprietary_fabfile_path, method_name, params_str )
returncode, message = run_subprocess( app, cmd )
except Exception, e:
- return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
+ return "Exception executing fabric script %s: %s. " % ( str( proprietary_fabfile_path ), str( e ) )
if returncode:
return message
message = handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=package_name )
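Putting the two halves together: for an XML file shaped like the sketch near the top of this message, install_via_fabric above hands install_and_build_package an actions_dict roughly like this (all paths hypothetical, with $INSTALL_DIR already substituted):

actions_dict = dict( install_dir='/deps/samtools/0.1.18/owner/repo/changeset',
                     package_name='samtools',
                     actions=[ ( 'download_by_url', dict( url='http://example.org/samtools-0.1.18.tar.gz' ) ),
                               ( 'shell_command', dict( command='make' ) ),
                               ( 'move_directory_files', dict( source_directory='bin',
                                                               destination_directory='/deps/samtools/0.1.18/owner/repo/changeset/bin' ) ),
                               ( 'set_environment', dict( environment_variable=dict( name='PATH',
                                                                                     action='prepend_to',
                                                                                     value='/deps/samtools/0.1.18/owner/repo/changeset/bin' ) ) ) ] )

The first tuple always selects the fetch step (download_by_url or a shell_command clone); install_and_build_package in fabric_util.py consumes the rest inside the unpacked directory.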
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -479,52 +479,16 @@
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
+ dependency_key = '%s/%s' % ( package_name, package_version )
requirements_dict [ 'name' ] = package_name
+ requirements_dict [ 'version' ] = package_version
requirements_dict [ 'type' ] = 'package'
- requirements_dict [ 'version' ] = package_version
- dependency_key = '%s/%s' % ( package_name, package_version )
- fabfiles_dict = {}
for sub_elem in elem:
- if sub_elem.tag == 'proprietary_fabfile':
- requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=True )
- elif sub_elem.tag == 'fabfile':
- requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=False )
- elif sub_elem.tag == 'readme':
+ if sub_elem.tag == 'readme':
requirements_dict[ 'readme' ] = sub_elem.text
if requirements_dict:
tool_dependencies_dict[ dependency_key ] = requirements_dict
return tool_dependencies_dict
-def generate_fabfile_metadata( elem, requirements_dict, proprietary=False ):
- """
- <proprietary_fabfile name="fabfile.py">
- <method name="install_and_build">
- <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</param>
- <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/PHYLIP-3.6b.tar.gz</param>
- </method>
- </proprietary_fabfile>
- """
- fabfiles_dict = {}
- fabfile_name = elem.get( 'name', None )
- if fabfile_name:
- for method_elem in elem.findall( 'method' ):
- method_name = method_elem.get( 'name', None )
- if method_name:
- params_str = ''
- for param_elem in method_elem.findall( 'param' ):
- param_name = param_elem.get( 'name', None )
- param_value = param_elem.text
- if param_name and param_value:
- params_str += '%s=%s,' % ( param_name, param_value )
- fabfiles_dict[ 'fabfile' ] = fabfile_name
- fabfiles_dict[ 'method' ] = method_name
- fabfiles_dict[ 'params' ] = params_str.rstrip( ',' )
- if fabfiles_dict:
- if proprietary:
- key = 'proprietary_fabfiles'
- else:
- key = 'fabfiles'
- requirements_dict[ key ] = fabfiles_dict
- return requirements_dict
def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ):
"""Generate metadata using only the repository files on disk - files are not retrieved from the repository manifest."""
metadata_dict = {}
@@ -1194,7 +1158,7 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies=None ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
@@ -1202,8 +1166,7 @@
will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
- status = 'done'
- message = ''
+ installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
tree = ElementTree.parse( tool_dependencies_config )
root = tree.getroot()
@@ -1214,7 +1177,8 @@
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- can_install = True
+ # The value of tool_dependencies will be None only when this method is called by the InstallManager. In that case, tool
+ # dependency installation is not ajaxian, so the ToolDependency objects do not yet exist.
if tool_dependencies:
# Only install the package if it is not already installed.
can_install = False
@@ -1223,12 +1187,14 @@
can_install = tool_dependency.status in [ app.model.ToolDependency.installation_status.NEVER_INSTALLED,
app.model.ToolDependency.installation_status.UNINSTALLED ]
break
+ else:
+ can_install = False
if can_install:
tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
- if tool_dependency and tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
- message = tool_dependency.error_message
- status = 'error'
- return status, message
+ if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
+ app.model.ToolDependency.installation_status.ERROR ]:
+ installed_tool_dependencies.append( tool_dependency )
+ return installed_tool_dependencies
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revision up to the currently installed changeset revision),
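As a concrete illustration of the installation path layout described in the handle_tool_dependencies docstring above (every path component here is hypothetical):

import os
tool_dependency_dir = 'tool_deps'  # app.config.tool_dependency_dir
install_dir = os.path.join( tool_dependency_dir,
                            'samtools', '0.1.18',         # <package_name>/<package_version>
                            'owner', 'package_samtools',  # <repo_owner>/<repo_name>
                            'abcdef012345' )              # <repo_installed_changeset_revision>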
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -113,6 +113,11 @@
use_paging = False
columns = [
NameColumn( "Name",
+ link=( lambda item: iff( item.status in \
+ [ model.ToolDependency.installation_status.NEVER_INSTALLED,
+ model.ToolDependency.installation_status.INSTALLING,
+ model.ToolDependency.installation_status.UNINSTALLED ], \
+ None, dict( action="manage_tool_dependencies", operation='browse', id=item.id ) ) ),
filterable="advanced" ),
VersionColumn( "Version",
filterable="advanced" ),
@@ -130,8 +135,17 @@
allow_multiple=True,
allow_popup=False,
condition=( lambda item: item.status in [ model.ToolDependency.installation_status.INSTALLED,
- model.ToolDependency.installation_status.ERROR ] ) )
+ model.ToolDependency.installation_status.ERROR ] ) )
]
+ def build_initial_query( self, trans, **kwd ):
+ tool_dependency_ids = kwd.get( 'tool_dependency_ids', None )
+ if tool_dependency_ids:
+ clause_list = []
+ for tool_dependency_id in tool_dependency_ids:
+ clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_dependency_id ) )
+ return trans.sa_session.query( self.model_class ) \
+ .filter( or_( *clause_list ) )
+ return trans.sa_session.query( self.model_class )
def apply_query_filter( self, trans, query, **kwd ):
tool_dependency_id = kwd.get( 'tool_dependency_id', None )
if not tool_dependency_id:
@@ -363,6 +377,7 @@
def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
"""Install specified dependencies for repository tools."""
# Get the tool_shed_repository from one of the tool_dependencies.
+ message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
work_dir = make_tmp_directory()
# Get the tool_dependencies.xml file from the repository.
@@ -371,17 +386,23 @@
tool_shed_repository,
tool_shed_repository.changeset_revision,
work_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ for installed_tool_dependency in installed_tool_dependencies:
+ if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
+ message += ' %s' % installed_tool_dependency.error_message
try:
shutil.rmtree( work_dir )
except:
pass
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
- if not message:
- message = "Installed tool dependencies: %s" % ','.join( td.name for td in tool_dependencies )
+ if message:
+ status = 'error'
+ else:
+ message = "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_tool_dependencies',
tool_dependency_ids=tool_dependency_ids,
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -88,6 +88,7 @@
<td><b>name</b></td><td><b>version</b></td><td><b>type</b></td>
+ <td><b>status</b></td></tr>
%for tool_dependency in missing_tool_dependencies:
<tr>
@@ -98,6 +99,7 @@
</td><td>${tool_dependency.version}</td><td>${tool_dependency.type}</td>
+ <td>${tool_dependency.status}</td></tr>
%endfor
</table>
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -111,13 +111,14 @@
%for dependency_key, requirements_dict in tool_dependencies.items():
<%
name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version' ]
+
%><tr><td>${name}</td>
+ <td>${version}</td><td>${type}</td>
- <td>${version}</td></tr>
%endfor
</table>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: inithello: Added Ensembl build parser. Improved genome downloader interface. Added post-download indexing feature.
by Bitbucket 25 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d2aba0918cf0/
changeset: d2aba0918cf0
user: inithello
date: 2012-06-25 15:39:41
summary: Added Ensembl build parser. Improved genome downloader interface. Added post-download indexing feature.
affected #: 17 files
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -50,7 +50,9 @@
tool-data/shared/igv/igv_build_sites.txt
tool-data/shared/rviewer/rviewer_build_sites.txt
tool-data/shared/ucsc/builds.txt
+tool-data/shared/ensembl/builds.txt
tool-data/*.loc
+tool-data/genome/*
# Test output
run_functional_tests.html
@@ -72,4 +74,5 @@
*.orig
.DS_Store
*.rej
-*~
\ No newline at end of file
+*~
+
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/get_ensembl.py
--- /dev/null
+++ b/cron/get_ensembl.py
@@ -0,0 +1,22 @@
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require("SQLAlchemy >= 0.4")
+pkg_resources.require("MySQL_python")
+from sqlalchemy import *
+
+
+engine = create_engine( 'mysql://anonymous@ensembldb.ensembl.org:5306', pool_recycle=3600 )
+conn = engine.connect()
+dbs = conn.execute( "SHOW DATABASES LIKE 'ensembl_website_%%'" )
+builds = {}
+lines = []
+for res in dbs:
+ dbname = res[0]
+ release = dbname.split('_')[-1]
+ genomes = conn.execute( "SELECT RS.assembly_code, S.name, S.common_name, %s FROM ensembl_website_%s.release_species RS LEFT JOIN ensembl_website_%s.species S on RS.species_id = S.species_id" % ( release, release, release ) )
+ for genome in genomes:
+ builds[genome[0]] = dict( release=genome[3], species='%s (%s/%s)' % ( genome[1], genome[2], genome[0] ) )
+for build in builds.items():
+ lines.append( '\t'.join( [ build[0], '%d' % build[1]['release'], build[1]['species'] ] ) )
+
+print '\n'.join( lines )
\ No newline at end of file
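The script prints one tab-separated record per assembly. A hypothetical builds.txt line (all values illustrative), in the three-column format the read_ensembl() parser added to lib/galaxy/util/__init__.py below expects:

line = '\t'.join( [ 'GRCh37',                          # assembly code, used as the Galaxy dbkey
                    '67',                              # Ensembl release number
                    'Homo_sapiens (Human/GRCh37)' ] )  # species (common name/assembly code)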
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/parse_publicbuilds.py
--- /dev/null
+++ b/cron/parse_publicbuilds.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+"""
+Connects to the URL specified and outputs builds available at that
+DSN in tabular format. USCS Test gateway is used as default.
+build description
+"""
+
+import sys
+import urllib
+if sys.version_info[:2] >= ( 2, 5 ):
+ import xml.etree.ElementTree as ElementTree
+else:
+ from galaxy import eggs
+ import pkg_resources; pkg_resources.require( "elementtree" )
+ from elementtree import ElementTree
+
+URL = "http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
+
+def getbuilds(url):
+ try:
+ page = urllib.urlopen(URL)
+ except:
+ print "#Unable to open " + URL
+ print "?\tunspecified (?)"
+ sys.exit(1)
+
+ text = page.read()
+ try:
+ tree = ElementTree.fromstring(text)
+ except:
+ print "#Invalid xml passed back from " + URL
+ print "?\tunspecified (?)"
+ sys.exit(1)
+
+ print "#Harvested from http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
+ print "?\tunspecified (?)"
+ for dsn in tree:
+ build = dsn.find("SOURCE").attrib['id']
+ description = dsn.find("DESCRIPTION").text.replace(" - Genome at UCSC","").replace(" Genome at UCSC","")
+
+ fields = description.split(" ")
+ temp = fields[0]
+ for i in range(len(fields)-1):
+ if temp == fields[i+1]:
+ fields.pop(i+1)
+ else:
+ temp = fields[i+1]
+ description = " ".join(fields)
+ yield [build,description]
+
+if __name__ == "__main__":
+ if len(sys.argv) > 1:
+ URL = sys.argv[1]
+ for build in getbuilds(URL):
+ print build[0]+"\t"+build[1]+" ("+build[0]+")"
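Each emitted line pairs a UCSC build key with a display label. A hypothetical publicbuilds.txt line (values illustrative), in the two-column format read_dbnames() consumes in lib/galaxy/util/__init__.py below:

line = '%s\t%s (%s)' % ( 'hg19', 'Human Feb. 2009', 'hg19' )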
+
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/updateensembl.sh.sample
--- /dev/null
+++ b/cron/updateensembl.sh.sample
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# Script to update Ensembl shared data tables. The idea is to update, but if
+# the update fails, not replace current data/tables with error
+# messages.
+
+# Edit this line to refer to galaxy's path:
+GALAXY=/path/to/galaxy
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
+
+# setup directories
+echo "Creating required directories."
+DIRS="
+${GALAXY}/tool-data/shared/ensembl
+${GALAXY}/tool-data/shared/ensembl/new
+"
+for dir in $DIRS; do
+ if [ ! -d $dir ]; then
+ echo "Creating $dir"
+ mkdir $dir
+ else
+ echo "$dir already exists, continuing."
+ fi
+done
+
+date
+echo "Updating Ensembl shared data tables."
+
+# Try to build "builds.txt"
+echo "Updating builds.txt"
+python ${GALAXY}/cron/get_ensembl.py > ${GALAXY}/tool-data/shared/ensembl/new/builds.txt
+if [ $? -eq 0 ]
+then
+ diff ${GALAXY}/tool-data/shared/ensembl/new/builds.txt ${GALAXY}/tool-data/shared/ensembl/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ensembl/new/builds.txt ${GALAXY}/tool-data/shared/ensembl/builds.txt
+ fi
+else
+ echo "Failed to update builds.txt" >&2
+fi
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample
+++ b/cron/updateucsc.sh.sample
@@ -28,6 +28,20 @@
date
echo "Updating UCSC shared data tables."
+# Try to build "publicbuilds.txt"
+echo "Updating publicbuilds.txt"
+python ${GALAXY}/cron/parse_publicbuilds.py > ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt
+if [ $? -eq 0 ]
+then
+ diff ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt ${GALAXY}/tool-data/shared/ucsc/publicbuilds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt ${GALAXY}/tool-data/shared/ucsc/publicbuilds.txt
+ fi
+else
+ echo "Failed to update publicbuilds.txt" >&2
+fi
+
# Try to build "builds.txt"
echo "Updating builds.txt"
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/genome_index.py
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/genome_index.py
@@ -0,0 +1,43 @@
+"""
+Module for managing genome transfer jobs.
+"""
+from __future__ import with_statement
+
+import logging, shutil, gzip, bz2, zipfile, tempfile, tarfile, sys, os
+
+from galaxy import eggs
+from sqlalchemy import and_
+from data_transfer import *
+
+log = logging.getLogger( __name__ )
+
+__all__ = [ 'GenomeIndexPlugin' ]
+
+class GenomeIndexPlugin( DataTransfer ):
+
+ def __init__( self, app ):
+ super( GenomeIndexPlugin, self ).__init__( app )
+ self.app = app
+ self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
+ self.sa_session = app.model.context.current
+
+ def create_job( self, trans, path, indexes, dbkey, intname ):
+ params = dict( user=trans.user.id, path=path, indexes=indexes, dbkey=dbkey, intname=intname )
+ deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'GenomeIndexPlugin', params = params )
+ self.sa_session.add( deferred )
+ self.sa_session.flush()
+ log.debug( 'Job created, id %d' % deferred.id )
+ return deferred.id
+
+ def check_job( self, job ):
+ log.debug( 'Job check' )
+ return 'ready'
+
+ def run_job( self, job ):
+ incoming = dict( path=os.path.abspath( job.params[ 'path' ] ), indexer=job.params[ 'indexes' ][0], user=job.params[ 'user' ] )
+ indexjob = self.tool.execute( self, set_output_hid=False, history=None, incoming=incoming, transfer=None, deferred=job )
+ job.params[ 'indexjob' ] = indexjob[0].id
+ job.state = self.app.model.DeferredJob.states.RUNNING
+ self.sa_session.add( job )
+ self.sa_session.flush()
+ return self.app.model.DeferredJob.states.RUNNING
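A sketch of how the new plugin is driven, mirroring the index_build() controller method added to lib/galaxy/web/controllers/data_admin.py below (the path, index list, dbkey, and display name are hypothetical):

jobid = app.job_manager.deferred_job_queue.plugins[ 'GenomeIndexPlugin' ].create_job( trans,
                                                                                      '/data/genomes/hg19/seq/hg19.fa',
                                                                                      [ 'bwa_indexes' ],
                                                                                      'hg19',
                                                                                      'Human (hg19)' )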
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -78,10 +78,11 @@
def get_job_status( self, jobid ):
job = self.sa_session.query( self.app.model.DeferredJob ).get( int( jobid ) )
- if not hasattr( job, 'transfer_job' ):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
+ if 'transfer_job_id' in job.params:
+ if not hasattr( job, 'transfer_job' ):
+ job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+ else:
+ self.sa_session.refresh( job.transfer_job )
return job
def run_job( self, job ):
@@ -139,7 +140,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
os.close( fd )
compressed.close()
elif data_type == 'bzip':
@@ -154,7 +154,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
os.close( fd )
compressed.close()
elif data_type == 'zip':
@@ -177,7 +176,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
zipped_file.close()
else:
try:
@@ -223,8 +221,8 @@
else:
job.state = self.app.model.DeferredJob.states.OK
self.sa_session.add( job )
+ self.sa_session.flush()
return self.app.model.DeferredJob.states.OK
- self.sa_session.flush()
def _check_compress( self, filepath ):
retval = ''
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/liftover_transfer.py
--- a/lib/galaxy/jobs/deferred/liftover_transfer.py
+++ b/lib/galaxy/jobs/deferred/liftover_transfer.py
@@ -40,7 +40,7 @@
deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'LiftOverTransferPlugin', params = params )
self.sa_session.add( deferred )
self.sa_session.flush()
- return deferred.id
+ return job.id
def check_job( self, job ):
if job.params['type'] == 'init_transfer':
@@ -98,7 +98,9 @@
transfer = job.transfer_job
if params[ 'type' ] == 'extract_transfer':
CHUNK_SIZE = 2**20
- destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), job.params[ 'dbkey' ], 'liftOver' )
+ destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), source, 'liftOver' )
+ if not os.path.exists( destpath ):
+ os.makedirs( destpath )
destfile = job.params[ 'destfile' ]
destfilepath = os.path.join( destpath, destfile )
tmpprefix = '%s_%s_download_unzip_' % ( job.params['dbkey'], job.params[ 'transfer_job_id' ] )
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/actions/index_genome.py
--- a/lib/galaxy/tools/actions/index_genome.py
+++ b/lib/galaxy/tools/actions/index_genome.py
@@ -21,7 +21,9 @@
job.tool_id = tool.id
job.user_id = incoming['user']
start_job_state = job.state # should be job.states.NEW
- job.state = job.states.WAITING # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
+ job.state = job.states.WAITING # we need to set job state to something other than NEW,
+ # or else when tracking jobs in db it will be picked up
+ # before we have added input / output parameters
trans.sa_session.add( job )
# Create dataset that will serve as archive.
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -13,13 +13,12 @@
def load_genome_index_tools( toolbox ):
""" Adds tools for indexing genomes via the main job runner. """
- # Use same process as that used in load_external_metadata_tool; see that
- # method for why create tool description files on the fly.
+ # Create XML for loading the tool.
tool_xml_text = """
<tool id="__GENOME_INDEX__" name="Index Genome" version="0.1" tool_type="genome_index"><type class="GenomeIndexTool" module="galaxy.tools"/><action module="galaxy.tools.actions.index_genome" class="GenomeIndexToolAction"/>
- <command>$__GENOME_INDEX_COMMAND__ $output_file $output_file.files_path $__app__.config.rsync_url</command>
+ <command>$__GENOME_INDEX_COMMAND__ $output_file $output_file.files_path $__app__.config.rsync_url "$__app__.config.tool_data_path"</command><inputs><param name="__GENOME_INDEX_COMMAND__" type="hidden"/></inputs>
@@ -29,7 +28,7 @@
</tool>
"""
- # Load export tool.
+ # Load index tool.
tmp_name = tempfile.NamedTemporaryFile()
tmp_name.write( tool_xml_text )
tmp_name.flush()
@@ -166,6 +165,10 @@
self._check_link( fasta, target )
for line in location:
self._add_line( line[ 'file' ], line[ 'line' ] )
+ deferred.state = app.model.DeferredJob.states.OK
+ sa_session.add( deferred )
+ sa_session.flush()
+
def _check_link( self, targetfile, symlink ):
target = os.path.relpath( targetfile, os.path.dirname( symlink ) )
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -10,7 +10,8 @@
import optparse, sys, os, tempfile, time, subprocess, shlex, json, tarfile, shutil
class ManagedIndexer():
- def __init__( self, output_file, infile, workingdir, rsync_url ):
+ def __init__( self, output_file, infile, workingdir, rsync_url, tooldata ):
+ self.tooldatapath = os.path.abspath( tooldata )
self.workingdir = os.path.abspath( workingdir )
self.outfile = open( os.path.abspath( output_file ), 'w' )
self.basedir = os.path.split( self.workingdir )[0]
@@ -44,11 +45,12 @@
with WithChDir( self.workingdir ):
self._log( 'Running indexer %s.' % indexer )
result = getattr( self, self.indexers[ indexer ] )()
- if result is None:
- self._log( 'Error running indexer %s.' % indexer )
+ if result in [ None, False ]:
+ self._log( 'Error running indexer %s, %s' % ( indexer, result ) )
self._flush_files()
return True
else:
+ self._log( self.locations )
self._log( 'Indexer %s completed successfully.' % indexer )
self._flush_files()
@@ -93,6 +95,7 @@
os.remove( self.fafile )
return self._bwa_cs()
else:
+ self._log( 'BWA (base) exited with code %s' % result )
return False
def _bwa_cs( self ):
@@ -109,6 +112,7 @@
self.locations[ 'cs' ].append( self.fafile )
os.remove( self.fafile )
else:
+ self._log( 'BWA (color) exited with code %s' % result )
return False
else:
self.locations[ 'cs' ].append( self.fafile )
@@ -136,6 +140,7 @@
os.remove( self.fafile )
return self._bowtie_cs()
else:
+ self._log( 'Bowtie (base) exited with code %s' % result )
return False
def _bowtie_cs( self ):
@@ -149,6 +154,7 @@
if result == 0:
self.locations[ 'cs' ].append( self.genome )
else:
+ self._log( 'Bowtie (color) exited with code %s' % result )
return False
os.remove( os.path.join( indexdir, self.fafile ) )
else:
@@ -174,6 +180,7 @@
os.remove( self.fafile )
return True
else:
+ self._log( 'Bowtie2 exited with code %s' % result )
return False
def _twobit( self ):
@@ -193,6 +200,7 @@
os.remove( self.fafile )
return True
else:
+ self._log( 'faToTwoBit exited with code %s' % result )
return False
def _perm( self ):
@@ -208,12 +216,15 @@
command = shlex.split("PerM %s %s --readFormat fastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
result = subprocess.call( command )
if result != 0:
+ self._log( 'PerM (base) exited with code %s' % result )
return False
self.locations[ 'nt' ].append( [ key, desc, index ] )
os.remove( self.fafile )
return self._perm_cs()
def _perm_cs( self ):
+ genome = self.genome
+ read_length = 50
if not os.path.exists( 'cs' ):
os.makedirs( 'cs' )
with WithChDir( 'cs' ):
@@ -223,12 +234,13 @@
desc = '%s: seed=%s, read length=%s' % (genome, seed, read_length)
index = "%s_color_%s_%s.index" % (genome, seed, read_length)
if not os.path.exists( index ):
- command = shlex.split("PerM %s %s --readFormat csfastq --seed %s -m -s %s" % (local_ref, read_length, seed, index))
+ command = shlex.split("PerM %s %s --readFormat csfastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
if result != 0:
+ self._log( 'PerM (color) exited with code %s' % result )
return False
self.locations[ 'cs' ].append( [ key, desc, index ] )
- os.remove( local_ref )
+ os.remove( self.fafile )
temptar = tarfile.open( 'cs.tar', 'w' )
temptar.add( 'cs' )
temptar.close()
@@ -241,17 +253,19 @@
self.locations[ 'nt' ].append( self.fafile )
return True
local_ref = self.fafile
- srma = 'tool-data/shared/jars/srma.jar'
+ srma = os.path.abspath( os.path.join( self.tooldatapath, 'shared/jars/picard/CreateSequenceDictionary.jar' ) )
genome = os.path.splitext( self.fafile )[0]
self._check_link()
if not os.path.exists( '%s.fai' % self.fafile ) and not os.path.exists( '%s.fai' % self.genome ):
command = shlex.split( 'samtools faidx %s' % self.fafile )
subprocess.call( command, stderr=self.logfile )
- command = shlex.split( "java -cp %s net.sf.picard.sam.CreateSequenceDictionary R=%s O=%s/%s.dict URI=%s" \
- % ( srma, local_ref, os.curdir, genome, local_ref ) )
+ command = shlex.split( "java -jar %s R=%s O=%s.dict URI=%s" \
+ % ( srma, local_ref, genome, local_ref ) )
if not os.path.exists( '%s.dict' % self.genome ):
result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
+ self._log( ' '.join( command ) )
if result != 0:
+ self._log( 'Picard exited with code %s' % result )
return False
self.locations[ 'nt' ].append( self.fafile )
os.remove( self.fafile )
@@ -260,17 +274,20 @@
def _sam( self ):
local_ref = self.fafile
local_file = os.path.splitext( self.fafile )[ 0 ]
+ print 'Trying rsync'
result = self._do_rsync( '/sam_index/' )
if result == 0 and ( os.path.exists( '%s.fai' % self.fafile ) or os.path.exists( '%s.fai' % self.genome ) ):
- self.locations[ 'nt' ].append( local_ref )
+ self.locations[ 'nt' ].append( '%s.fai' % local_ref )
return True
self._check_link()
+ print 'Trying indexer'
command = shlex.split("samtools faidx %s" % local_ref)
- result = subprocess.call( command, stderr=self.logfile )
+ result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
if result != 0:
+ self._log( 'SAM exited with code %s' % result )
return False
else:
- self.locations[ 'nt' ].append( local_ref )
+ self.locations[ 'nt' ].append( '%s.fai' % local_ref )
os.remove( local_ref )
return True
@@ -288,9 +305,9 @@
# Parse command line.
parser = optparse.OptionParser()
(options, args) = parser.parse_args()
- indexer, infile, outfile, working_dir, rsync_url = args
+ indexer, infile, outfile, working_dir, rsync_url, tooldata = args
# Create archive.
- idxobj = ManagedIndexer( outfile, infile, working_dir, rsync_url )
+ idxobj = ManagedIndexer( outfile, infile, working_dir, rsync_url, tooldata )
idxobj.run_indexer( indexer )
\ No newline at end of file
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -407,6 +407,22 @@
db_names = DBNames( [( db_names.default_value, db_names.default_name )] )
return db_names
+def read_ensembl( filename, ucsc ):
+ """ Read Ensembl build names from file """
+ ucsc_builds = []
+ for build in ucsc:
+ ucsc_builds.append( build[0] )
+ ensembl_builds = list()
+ try:
+ for line in open( filename ):
+ if line[0:1] in [ '#', '\t' ]: continue
+ fields = line.replace("\r","").replace("\n","").split("\t")
+ if fields[0] in ucsc_builds: continue
+ ensembl_builds.append( dict( dbkey=fields[0], release=fields[1], name=fields[2].replace( '_', ' ' ) ) )
+ except Exception, e:
+ print "ERROR: Unable to read builds file:", e
+ return ensembl_builds
+
def read_build_sites( filename, check_builds=True ):
""" read db names to ucsc mappings from file, this file should probably be merged with the one above """
build_sites = []
@@ -634,11 +650,15 @@
s.quit()
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
+
# The dbnames list is used in edit attributes and the upload tool
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) )
+ucsc_names = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "publicbuilds.txt" ) )
+ensembl_names = read_ensembl( os.path.join( galaxy_root_path, "tool-data", "shared", "ensembl", "builds.txt" ), ucsc_names )
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) )
gbrowse_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "gbrowse", "gbrowse_build_sites.txt" ) )
genetrack_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "genetrack", "genetrack_sites.txt" ), check_builds=False )
+dlnames = dict(ucsc=ucsc_names, ensembl=ensembl_names)
def galaxy_directory():
return os.path.abspath(galaxy_root_path)
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -26,17 +26,69 @@
error='panel-error-message',
queued='state-color-waiting'
)
-
+
@web.expose
@web.require_admin
def manage_data( self, trans, **kwd ):
+ genomes = dict()
if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
return trans.fill_template( '/admin/data_admin/betajob.mako' )
- dbkeys = trans.db_builds
- return trans.fill_template( '/admin/data_admin/data_form.mako', dbkeys=dbkeys )
+ for line in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
+ indexers = dict( bowtie_indexes='Generate', bowtie2_indexes='Generate', bwa_indexes='Generate', perm_base_indexes='Generate', srma_indexes='Generate', sam_fa_indexes='Generate' )
+ dbkey = line[0]
+ name = line[2]
+ indexers[ 'name' ] = name
+ indexers[ 'fapath' ] = line[3]
+ genomes[ dbkey ] = indexers
+ for table in [ 'bowtie_indexes', 'bowtie2_indexes', 'bwa_indexes', 'srma_indexes' ]:
+ for line in trans.app.tool_data_tables.data_tables[ table ].data:
+ dbkey = line[0]
+ genomes[ dbkey ][ table ] = 'Generated'
+ for line in trans.app.tool_data_tables.data_tables[ 'sam_fa_indexes' ].data:
+ genomes[ line[1] ][ 'sam_fa_indexes' ] = 'Generated'
+ for line in trans.app.tool_data_tables.data_tables[ 'perm_base_indexes' ].data:
+ genomes[ line[1].split(':')[0] ][ 'perm_base_indexes' ] = 'Generated'
+ jobgrid = []
+ sa_session = trans.app.model.context.current
+ jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all()
+ prevjobid = 0
+ for job in jobs:
+ if prevjobid == job.deferred.id:
+ continue
+ prevjobid = job.deferred.id
+ state = job.deferred.state
+ params = job.deferred.params
+ if job.transfer is not None:
+ jobtype = 'download'
+ else:
+ jobtype = 'index'
+ indexers = ', '.join( params['indexes'] )
+ jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) )
+ return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, genomes=genomes )
+
+ @web.expose
+ @web.require_admin
+ def add_genome( self, trans, **kwd ):
+ if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
+ return trans.fill_template( '/admin/data_admin/betajob.mako' )
+ dbkeys = trans.ucsc_builds
+ ensemblkeys = trans.ensembl_builds
+ return trans.fill_template( '/admin/data_admin/data_form.mako', dbkeys=dbkeys, ensembls=ensemblkeys )
@web.expose
@web.require_admin
+ def index_build( self, trans, **kwd ):
+ """Index a previously downloaded genome."""
+ params = util.Params( kwd )
+ path = os.path.abspath( params.get( 'path', None ) )
+ indexes = [ params.get( 'indexes', None ) ]
+ dbkey = params.get( 'dbkey', None )
+ intname = params.get( 'longname', None )
+ indexjob = trans.app.job_manager.deferred_job_queue.plugins['GenomeIndexPlugin'].create_job( trans, path, indexes, dbkey, intname )
+ return indexjob
+
+ @web.expose
+ @web.require_admin
def download_build( self, trans, **kwd ):
"""Download a genome from a remote source and add it to the library."""
params = util.Params( kwd )
@@ -57,21 +109,21 @@
protocol = 'http'
if source == 'NCBI':
- dbkey = params.get('dbkey', '')[0]
+ dbkey = params.get('ncbi_dbkey', '')[0]
url = 'http://togows.dbcls.jp/entry/ncbi-nucleotide/%s.fasta' % dbkey
elif source == 'Broad':
- dbkey = params.get('dbkey', '')[0]
+ dbkey = params.get('broad_dbkey', '')[0]
url = 'ftp://ftp.broadinstitute.org/pub/seq/references/%s.fasta' % dbkey
elif source == 'UCSC':
longname = None
- for build in trans.db_builds:
- if dbkey[1] == build[0]:
+ for build in trans.ucsc_builds:
+ if dbkey == build[0]:
dbkey = build[0]
longname = build[1]
break
assert dbkey is not '?', 'That build was not found'
ftp = ftplib.FTP('hgdownload.cse.ucsc.edu')
- ftp.login('anonymous', 'user(a)example.com')
+ ftp.login('anonymous', trans.get_user().email)
checker = []
liftover = []
newlift = []
@@ -81,10 +133,12 @@
fname = chain.split( '/' )[-1]
target = fname.replace( '.over.chain.gz', '' ).split( 'To' )[1]
target = target[0].lower() + target[1:]
- newlift.append( [ chain, dbkey, target ] )
+ if not os.path.exists( os.path.join( trans.app.config.get( 'genome_data_path', 'tool-data/genome' ), dbkey, 'liftOver', fname ) ):
+ newlift.append( [ chain, dbkey, target ] )
current = dbkey[0].upper() + dbkey[1:]
targetfile = '%sTo%s.over.chain.gz' % ( target, current )
- newlift.append( [ '/goldenPath/%s/liftOver/%s' % ( target, targetfile ), target, dbkey ] )
+ if not os.path.exists( os.path.join( trans.app.config.get( 'genome_data_path', 'tool-data/genome' ), target, 'liftOver', targetfile ) ):
+ newlift.append( [ '/goldenPath/%s/liftOver/%s' % ( target, targetfile ), target, dbkey ] )
except:
newlift = None
pass
@@ -103,36 +157,35 @@
status = u'error'
return trans.fill_template( '/admin/data_admin/data_form.mako',
message=message,
- status=status )
+ status=status,
+ ensembls=trans.ensembl_builds,
+ dbkeys=trans.ucsc_builds )
elif source == 'Ensembl':
- section = params.get('ensembl_section', '')
- release1 = params.get('release_number', '')
- organism = params.get('organism', '')
- name = params.get('name', '')
- longname = organism
- dbkey = name
- release2 = params.get('release2', '')
- release2 = ".%s" % release2 if release2 else ""
- if section == 'standard':
- url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s%s.dna.toplevel.fa.gz' % \
- (release1, organism.lower(), organism, name, release2)
- else:
- url = 'ftp://ftp.ensemblgenomes.org/pub/%s/release-%s/fasta/%s/dna/%s.%s%s.dna.top…' % \
- (section, release1, organism.lower(), organism, name, release2)
- elif source == 'local':
- url = 'http://127.0.0.1/%s.tar.gz' % dbkey
+ dbkey = params.get( 'ensembl_dbkey', None )
+ assert dbkey != '?', 'That build was not found'
+ for build in trans.ensembl_builds:
+ if build[ 'dbkey' ] == dbkey:
+ dbkey = build[ 'dbkey' ]
+ release = build[ 'release' ]
+ pathname = '_'.join( build[ 'name' ].split(' ')[0:2] )
+ longname = build[ 'name' ].replace('_', ' ')
+ break
+ url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
+ log.debug( build )
+ log.debug( url )
else:
- raise ValueError
+ raise ValueError, 'Somehow an invalid data source was specified.'
params = dict( protocol='http', name=dbkey, datatype='fasta', url=url, user=trans.user.id )
jobid = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].create_job( trans, url, dbkey, longname, indexers )
chainjob = []
if newlift is not None:
for chain in newlift:
- liftover_url = u'ftp://hgdownload.cse.ucsc.edu%s' % chain[0]
+ liftover_url = u'ftp://hgdownload.cse.ucsc.edu%s' % chain[0]
from_genome = chain[1]
to_genome = chain[2]
destfile = liftover_url.split('/')[-1].replace('.gz', '')
- chainjob.append( trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].create_job( trans, liftover_url, dbkey, from_genome, to_genome, destfile, jobid ) )
+ lochain = trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].create_job( trans, liftover_url, dbkey, from_genome, to_genome, destfile, jobid )
+ chainjob.append( lochain )
job = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].get_job_status( jobid )
job.params['liftover'] = chainjob
trans.app.model.context.current.add( job )
@@ -146,9 +199,13 @@
def monitor_status( self, trans, **kwd ):
params = util.Params( kwd )
jobid = params.get( 'job', '' )
+ gname = params.get( 'intname', '' )
+ deferred = trans.app.model.context.current.query( model.DeferredJob ).filter_by( id=jobid ).first()
+ gname = deferred.params[ 'intname' ]
+ indexers = ', '.join( deferred.params[ 'indexes' ] )
jobs = self._get_jobs( jobid, trans )
jsonjobs = json.dumps( jobs )
- return trans.fill_template( '/admin/data_admin/download_status.mako', mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
+ return trans.fill_template( '/admin/data_admin/download_status.mako', name=gname, indexers=indexers, mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
@web.expose
@web.require_admin
@@ -160,16 +217,6 @@
jobs = self._get_jobs( jobid, trans )
return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( jobs ) )
- @web.expose
- @web.require_admin
- def job_status( self, trans, **kwd ):
- params = util.Params( kwd )
- jobid = params.get( 'jobid', None )
- jobtype = params.get( 'jobtype', None )
- fillvals = None
- fillvals = self._get_job( jobid, jobtype, trans )
- return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( fillvals ) )
-
def _get_job( self, jobid, jobtype, trans ):
sa = trans.app.model.context.current
if jobtype == 'liftover':
@@ -191,12 +238,12 @@
job = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].get_job_status( jobid )
sa_session = trans.app.model.context.current
jobs.append( self._get_job( job.id, 'deferred', trans ) )
- jobs.append( self._get_job( job.transfer_job.id, 'transfer', trans ) )
- idxjobs = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred_job_id=job.id, transfer_job_id=job.transfer_job.id ).all()
- if job.params.has_key( 'liftover' ):
- for jobid in job.params[ 'liftover' ]:
- jobs.append( self._get_job( jobid, 'liftover', trans ) )
- for idxjob in idxjobs:
- #print idxjob
- jobs.append( self._get_job( idxjob.job_id, 'index', trans ) )
+ if hasattr( job, 'transfer_job' ): # This is a transfer job, check for indexers
+ jobs.append( self._get_job( job.transfer_job.id, 'transfer', trans ) )
+ idxjobs = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred_job_id=job.id, transfer_job_id=job.transfer_job.id ).all()
+ if job.params.has_key( 'liftover' ) and job.params[ 'liftover' ] is not None:
+ for jobid in job.params[ 'liftover' ]:
+ jobs.append( self._get_job( jobid, 'liftover', trans ) )
+ for idxjob in idxjobs:
+ jobs.append( self._get_job( idxjob.job_id, 'index', trans ) )
return jobs
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -799,6 +799,14 @@
dbnames.extend( util.dbnames )
return dbnames
+ @property
+ def ucsc_builds( self ):
+ return util.dlnames['ucsc']
+
+ @property
+ def ensembl_builds( self ):
+ return util.dlnames['ensembl']
+
def db_dataset_for( self, dbkey ):
"""
Returns the db_file dataset associated/needed by `dataset`, or `None`.
@@ -957,6 +965,14 @@
dbnames.append((key, "%s (%s) [Custom]" % (chrom_dict['name'], key) ))
dbnames.extend( util.dbnames )
return dbnames
+
+ @property
+ def ucsc_builds( self ):
+ return util.dlnames['ucsc']
+
+ @property
+ def ensembl_builds( self ):
+ return util.dlnames['ensembl']
class GalaxyWebUITransaction( GalaxyWebTransaction ):
def __init__( self, environ, app, webapp, session_cookie ):
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/data_form.mako
--- a/templates/admin/data_admin/data_form.mako
+++ b/templates/admin/data_admin/data_form.mako
@@ -62,7 +62,7 @@
<div class="form-row"><label for="indexers">Indexers</label><select name="indexers" multiple style="width: 200px; height: 125px;">
- <option value="2bit">TwoBit</option>
+ <option value="2bit" selected>TwoBit</option><option value="bowtie">Bowtie</option><option value="bowtie2">Bowtie 2</option><option value="bwa">BWA</option>
@@ -75,7 +75,7 @@
</div></div><h2>Parameters</h2>
- <div id="params_generic" class="params-block" style="display: block;">
+ <div id="params_Broad" class="params-block" style="display: block;"><div class="form-row"><label for="longname">Internal Name</label><input name="longname" type="text" label="Internal Name" />
@@ -88,55 +88,47 @@
</div><div id="dlparams"><div class="form-row">
- <label for="dbkey">External Name</label>
- <input name="dbkey" type="text" label="Genome Unique Name" />
+ <label for="broad_dbkey">External Name</label>
+ <input name="broad_dbkey" type="text" label="Genome Unique Name" /><div style="clear: both;"> </div></div></div></div>
- <div id="params_ensembl" class="params-block">
+ <div id="params_NCBI" class="params-block" style="display: block;"><div class="form-row">
- <label for="ensembl_section">Section</label>
- <input name="ensembl_section" type="text" label="Section" />
+ <label for="longname">Internal Name</label>
+ <input name="longname" type="text" label="Internal Name" /><div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Ensembl section, either standard or one of plants, protists, metazoa, fungi, bacteria.
- </div></div><div class="form-row">
- <label for="release_number">Release Number</label>
- <input name="release_number" type="text" label="Release" />
+ <label for="uniqid">Internal Unique Identifier</label>
+ <input name="uniqid" type="text" label="Internal Identifier" /><div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Release number, e.g. ftp://ftp.ensembl.org/pub/release-<strong style="color: red;">56</strong>/fasta/callithrix_jacchus/dna/Callithrix_jacchus.calJac3.56.dna.toplevel.fa.gz
- </div></div>
- <div class="form-row">
- <label for="organism">Organism</label>
- <input name="organism" type="text" label="Organism" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Organism long name, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/<strong style="color: red;">Callithrix_jacchus</strong>.calJac3.56.dna.toplevel.fa.gz
- </div>
- </div>
- <div class="form-row">
- <label for="name">Name</label>
- <input name="name" type="text" label="name" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Organism short name, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/Callithri….<strong style="color: red;">calJac3</strong>.56.dna.toplevel.fa.gz
- </div>
- </div>
- <div class="form-row">
- <label for="release2">Release ID</label>
- <input name="release2" type="text" label="Release ID" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Release ID, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/Callithri….<strong style="color: red;">56</strong>.dna.toplevel.fa.gz
+ <div id="dlparams">
+ <div class="form-row">
+ <label for="ncbi_dbkey">External Name</label>
+ <input name="ncbi_dbkey" type="text" label="Genome Unique Name" />
+ <div style="clear: both;"> </div></div></div></div>
- <div id="params_ucsc" class="params-block">
+ <div id="params_Ensembl" class="params-block">
+ <div class="form-row">
+ <label>Genome:</label>
+ <div class="form-row-input">
+ <select name="ensembl_dbkey" last_selected_value="?">
+ %for dbkey in ensembls:
+ <option value="${dbkey['dbkey']}">${dbkey['dbkey']} - ${dbkey['name']}</option>
+ %endfor
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ If you can't find the build you want in this list, <insert link to instructions here>
+ </div>
+ </div>
+ </div>
+ <div id="params_UCSC" class="params-block"><div class="form-row"><label>Genome:</label><div class="form-row-input">
@@ -166,23 +158,11 @@
checkDataSource();
});
function checkDataSource() {
- var ds = $('#datasource').val()
+ var ds = $('#datasource').val();
$('.params-block').each(function() {
$(this).hide();
});
- switch (ds) {
- case 'UCSC':
- $('#params_ucsc').show();
- break;
- case 'Ensembl':
- $('#params_ensembl').show();
- break;
- case 'NCBI':
- case 'Broad':
- default:
- $('#params_generic').show();
- break;
- }
+ $('#params_' + ds).show();
};
</script></form>
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/download_status.mako
--- a/templates/admin/data_admin/download_status.mako
+++ b/templates/admin/data_admin/download_status.mako
@@ -33,10 +33,15 @@
</div></div></%def>
-<p>The genome build and any selected indexers have been added to the job queue. Below you will see the status of each job.</p>
+<p>${name} has been added to the job queue
+ %if indexers:
+ to be indexed with ${indexers}
+ %endif
+ </p><table id="jobStatus"></table>
-<a href="${h.url_for( controller='data_admin', action='manage_data' )}">Return to the download form</a>
+<p><a href="${h.url_for( controller='data_admin', action='manage_data' )}">Overview</a>.</p>
+<p><a href="${h.url_for( controller='data_admin', action='add_genome' )}">Download form</a>.</p><script type="text/javascript">
jobs = ${jsonjobs}
finalstates = new Array('done', 'error', 'ok');
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/local_data.mako
--- /dev/null
+++ b/templates/admin/data_admin/local_data.mako
@@ -0,0 +1,161 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/galaxy/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
+ self.has_accessible_datasets = False
+%>
+</%def>
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "autocomplete_tagging" )}
+</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
+</%def>
+##
+## Override methods from base.mako and base_panels.mako
+##
+<%def name="center_panel()">
+ <div style="overflow: auto; height: 100%;">
+ <div class="page-container" style="padding: 10px;">
+ ${render_content()}
+ </div>
+ </div>
+</%def>
+<style type="text/css">
+ .params-block { display: none; }
+ td, th { padding-left: 10px; padding-right: 10px; }
+ td.Generate { text-decoration: underline; background-color: #EEEEEE; }
+ td.Generating { text-decoration: none; background-color: #FFFFCC; }
+ td.Generated { background-color: #CCFFCC; }
+</style>
+<div class="toolForm">
+ %if message:
+ <div class="${status}">${message}</div>
+ %endif
+ <div class="toolFormTitle">Currently tracked builds <a class="action-button" href="/data_admin/add_genome">Add new</a></div>
+ <div class="toolFormBody">
+ <h2>Locally cached data:</h2>
+ <h3>NOTE: Indexers queued here will not be reflected in the table until Galaxy is restarted.</h3>
+ <table id="locfiles">
+ <tr><th>Database ID</th><th>Name</th><th>Bowtie</th><th>Bowtie 2</th><th>BWA</th><th>Sam</th><th>Picard</th><th>PerM</th></tr>
+ %for dbkey in sorted(genomes.keys()):
+ <tr>
+ <td>${dbkey}</td>
+ <td>${genomes[dbkey]['name']}</td>
+ <td id="${dbkey}-bowtie" class="indexcell ${genomes[dbkey]['bowtie_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie_indexes']}</td>
+ <td id="${dbkey}-bowtie2" class="indexcell ${genomes[dbkey]['bowtie2_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie2" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie2_indexes']}</td>
+ <td id="${dbkey}-bwa" class="indexcell ${genomes[dbkey]['bwa_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bwa" data-dbkey="${dbkey}">${genomes[dbkey]['bwa_indexes']}</td>
+ <td id="${dbkey}-sam" class="indexcell ${genomes[dbkey]['sam_fa_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="sam" data-dbkey="${dbkey}">${genomes[dbkey]['sam_fa_indexes']}</td>
+ <td id="${dbkey}-picard" class="indexcell ${genomes[dbkey]['srma_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="picard" data-dbkey="${dbkey}">${genomes[dbkey]['srma_indexes']}</td>
+ <td id="${dbkey}-perm" class="indexcell ${genomes[dbkey]['perm_base_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="perm" data-dbkey="${dbkey}">${genomes[dbkey]['perm_base_indexes']}</td>
+ </tr>
+ %endfor
+ </table>
+ <h2>Recent jobs:</h2>
+ <p>Click the job ID to see job details. Note that this list only shows jobs initiated by your account.</p>
+ <div id="recentJobs">
+ %for job in jobgrid:
+ <div id="job-${job['deferred']}" data-dbkey="${job['dbkey']}" data-name="${job['intname']}" data-indexes="${job['indexers']}" data-jobid="${job['deferred']}" data-state="${job['state']}" class="historyItem-${job['state']} historyItemWrapper historyItem">
+ <p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status', job=job['deferred'] )}">${job['deferred']}</a>:
+ %if job['jobtype'] == 'download':
+ Download <em>${job['intname']}</em>
+ %if job['indexers']:
+ and index with ${job['indexers']}
+ %endif
+ %else:
+ Index <em>${job['intname']}</em> with ${job['indexers']}
+ %endif
+ </p>
+ </div>
+ %endfor
+ </div>
+</div>
+<script type="text/javascript">
+ finalstates = new Array('done', 'error', 'ok');
+ $('.indexcell').click(function() {
+ status = $(this).html();
+ elem = $(this);
+ if (status != 'Generate') {
+ return;
+ }
+ longname = $(this).attr('data-longname');
+ dbkey = $(this).attr('data-dbkey');
+ indexes = $(this).attr('data-index');
+ path = $(this).attr('data-fapath');
+ $.post('${h.url_for( controller='data_admin', action='index_build' )}', { longname: longname, dbkey: dbkey, indexes: indexes, path: path }, function(data) {
+ if (data == 'ERROR') {
+ alert('There was an error.');
+ }
+ else {
+ elem.html('Generating');
+ elem.attr('class', 'indexcell Generating');
+ }
+ newhtml = '<div data-dbkey="' + dbkey + '" data-name="' + longname + '" data-indexes="' + indexes + '" id="job-' + data + '" class="historyItem-new historyItemWrapper historyItem">' +
+ '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + data + '">' + data + '</a>: ' +
+ 'Index <em>' + longname + '</em> with ' + indexes + '</p></div>';
+ $('#recentJobs').prepend(newhtml);
+ $('#job-' + data).delay(3000).queue(function(n) {
+ checkJob(data);
+ n();
+ });
+ });
+ });
+
+ function checkJob(jobid) {
+ $.get('${h.url_for( controller='data_admin', action='get_jobs' )}', { jobid: jobid }, function(data) {
+ jsondata = JSON.parse(data)[0];
+ jsondata["name"] = $('#job-' + jobid).attr('data-name');
+ jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey');
+ jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes');
+ newhtml = makeNewHTML(jsondata);
+ $('#job-' + jobid).replaceWith(newhtml);
+ if ($.inArray(jsondata["status"], finalstates) == -1) {
+ $('#job-' + jobid).delay(3000).queue(function(n) {
+ checkJob(jobid);
+ n();
+ });
+ }
+ if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') {
+ elem = $('#' + jsondata["dbkey"] + '-' + jsondata["indexes"]);
+ elem.html('Generated');
+ elem.attr('class', 'indexcell Generated');
+ }
+ });
+ }
+
+ function makeNewHTML(jsondata) {
+ newhtml = '<div data-dbkey="' + jsondata["dbkey"] + '" data-name="' + jsondata["name"] + '" data-indexes="' + jsondata["indexes"] + '" id="job-' + jsondata["jobid"] + '" class="historyItem-' + jsondata["status"] + ' historyItemWrapper historyItem">' +
+ '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + jsondata["jobid"] + '">' + jsondata["jobid"] + '</a>: ' +
+ 'Index <em>' + jsondata["name"] + '</em> with ' + jsondata["indexes"] + '</p></div>';
+ return newhtml;
+ }
+
+ $(document).ready(function() {
+ $('.historyItem').each(function() {
+ state = $(this).attr('data-state');
+ jobid = $(this).attr('data-jobid');
+ if ($.inArray(state, finalstates) == -1) {
+ checkJob(jobid);
+ }
+ });
+ });
+
+</script>
\ No newline at end of file
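
The download path above chains deferred jobs: the genome transfer job is created first, each liftOver chain file becomes a child transfer job, and the child ids are stored back into the parent's params so the status page can poll the whole family. A minimal sketch of that chaining pattern; DummyPlugin and DummyJob are hypothetical stand-ins for Galaxy's deferred job plugins, not the real API:

class DummyJob(object):
    # Hypothetical stand-in for a Galaxy deferred job record.
    _next_id = 1
    def __init__(self):
        self.id = DummyJob._next_id
        DummyJob._next_id += 1
        self.params = {}

class DummyPlugin(object):
    # Hypothetical stand-in for a deferred job plugin's create/status API.
    def __init__(self):
        self.jobs = {}
    def create_job(self, **params):
        job = DummyJob()
        job.params.update(params)
        self.jobs[job.id] = job
        return job.id
    def get_job_status(self, jobid):
        return self.jobs[jobid]

transfer_plugin = DummyPlugin()
liftover_plugin = DummyPlugin()

# Parent job: download the genome and queue any indexers.
parent_id = transfer_plugin.create_job(url='ftp://example.org/hg19.fa.gz', dbkey='hg19')

# Each liftOver chain file becomes a child job that records its parent id.
chain_ids = []
for chain in ('hg19ToHg18.over.chain.gz', 'hg18ToHg19.over.chain.gz'):
    chain_ids.append(liftover_plugin.create_job(chain=chain, parent=parent_id))

# The parent keeps the child ids so the status page can poll the whole set.
parent = transfer_plugin.get_job_status(parent_id)
parent.params['liftover'] = chain_ids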
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Revert one of the "fixes" in 7301:46de10d8c8e5.
by Bitbucket 22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c4f325ba7caa/
changeset: c4f325ba7caa
user: greg
date: 2012-06-22 20:23:28
summary: Revert one of the "fixes" in 7301:46de10d8c8e5.
affected #: 1 file
diff -r 46de10d8c8e5ffd537f7c13e4833aa8635ab6fcf -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -438,13 +438,7 @@
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files = load_tool_from_tmp_directory( trans,
- repo,
- repo_dir,
- ctx,
- filename,
- work_dir )
- all_sample_files_copied.extend( sample_files )
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
@@ -861,8 +855,6 @@
tool = None
valid = False
error_message = ''
- sample_files = []
- deleted_sample_files = []
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
if tmp_config:
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
@@ -876,9 +868,6 @@
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
is_tool_config = False
if is_tool_config:
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=dir )
- if sample_files:
- trans.app.config.tool_data_path = dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
if tool_data_table_config:
@@ -903,7 +892,7 @@
error_message = str( e )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
- return is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files
+ return is_tool_config, valid, tool, error_message
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
commit/galaxy-central: greg: More tool shed refinements to enable working with Galaxy's ToolDataTableManager.
by Bitbucket 22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/46de10d8c8e5/
changeset: 46de10d8c8e5
user: greg
date: 2012-06-22 20:10:53
summary: More tool shed refinements to enable working with Galaxy's ToolDataTableManager.
affected #: 1 file
diff -r f197c4346cc44bf3996eee6b24e2c393ad519d9d -r 46de10d8c8e5ffd537f7c13e4833aa8635ab6fcf lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -429,6 +429,7 @@
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
trans.app.config.tool_data_path = work_dir
+ all_sample_files_copied = []
# Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
if 'tool_data_table_conf.xml.sample' in sample_files:
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
@@ -437,7 +438,13 @@
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files = load_tool_from_tmp_directory( trans,
+ repo,
+ repo_dir,
+ ctx,
+ filename,
+ work_dir )
+ all_sample_files_copied.extend( sample_files )
if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
@@ -449,33 +456,13 @@
invalid_files,
original_tool_data_path,
work_dir )
+ all_sample_files_copied.extend( sample_files_copied )
if can_set_metadata:
# Update the list of metadata dictionaries for tools in metadata_dict.
repository_clone_url = generate_clone_url( trans, id )
metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
else:
invalid_tool_configs.append( ctx_file_name )
- # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary
- # value of trans.app.config.tool_data_path, which is work_dir.
- for copied_sample_file in sample_files_copied:
- copied_file = copied_sample_file.replace( '.sample', '' )
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) )
- except:
- pass
- if trans.app.config.tool_data_path == work_dir:
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_file ) )
- except:
- pass
elif is_tool_config:
if not error_message:
error_message = 'Unknown problems loading tool.'
@@ -508,6 +495,27 @@
shutil.rmtree( work_dir )
except:
pass
+ # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary
+ # value of trans.app.config.tool_data_path, which is work_dir.
+ for copied_sample_file in all_sample_files_copied:
+ copied_file = copied_sample_file.replace( '.sample', '' )
+ try:
+ os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) )
+ except:
+ pass
+ try:
+ os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) )
+ except:
+ pass
+ if trans.app.config.tool_data_path == work_dir:
+ try:
+ os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) )
+ except:
+ pass
+ try:
+ os.unlink( os.path.join( original_tool_data_path, copied_file ) )
+ except:
+ pass
return metadata_dict, invalid_files, deleted_sample_files
def generate_tool_guid( trans, repository, tool ):
"""
@@ -810,6 +818,7 @@
except:
pass
return tool, message
+ original_tool_data_path = trans.app.config.tool_data_path
tool_config_filename = strip_path( tool_config_filename )
repository = get_repository( trans, repository_id )
repo_files_dir = repository.repo_path
@@ -818,6 +827,9 @@
tool = None
message = ''
work_dir = make_tmp_directory()
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ if sample_files:
+ trans.app.config.tool_data_path = work_dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
if tool_data_table_config:
@@ -841,12 +853,16 @@
shutil.rmtree( work_dir )
except:
pass
+ if sample_files:
+ trans.app.config.tool_data_path = original_tool_data_path
return tool, message
def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ):
is_tool_config = False
tool = None
valid = False
error_message = ''
+ sample_files = []
+ deleted_sample_files = []
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
if tmp_config:
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
@@ -860,6 +876,9 @@
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
is_tool_config = False
if is_tool_config:
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=dir )
+ if sample_files:
+ trans.app.config.tool_data_path = dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
if tool_data_table_config:
@@ -884,7 +903,7 @@
error_message = str( e )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
- return is_tool_config, valid, tool, error_message
+ return is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
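
The net effect of this change is that copied sample files are accumulated in all_sample_files_copied across every tool config and removed in a single pass at the end, rather than being unlinked inside the per-tool loop. A minimal sketch of that accumulate-then-clean-up pattern, using throwaway paths instead of Galaxy's tool_data_path:

import os, tempfile

# Sketch: copy *.sample files while processing each config, record every
# copy, and remove them all in one final pass. Paths are throwaway here.
tool_data_path = tempfile.mkdtemp()
all_sample_files_copied = []

def process_config(sample_file):
    # Stand-in for the per-config work that materializes a sample file.
    open(os.path.join(tool_data_path, sample_file), 'w').close()
    all_sample_files_copied.append(sample_file)

for name in ('sequence_index.loc.sample', 'alignseq.loc.sample'):
    process_config(name)

# Single cleanup pass once every tool config has been handled.
for copied_sample_file in all_sample_files_copied:
    for target in (copied_sample_file, copied_sample_file.replace('.sample', '')):
        try:
            os.unlink(os.path.join(tool_data_path, target))
        except OSError:
            pass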
commit/galaxy-central: dan: Allow external display applications to work when login_required=True.
by Bitbucket 22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f197c4346cc4/
changeset: f197c4346cc4
user: dan
date: 2012-06-22 19:31:29
summary: Allow external display applications to work when login_required=True.
affected #: 1 file
diff -r db2bf800496478a5ea041480c3c514c2620e28ae -r f197c4346cc44bf3996eee6b24e2c393ad519d9d lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -419,7 +419,7 @@
# The value of session_cookie can be one of
# 'galaxysession' or 'galaxycommunitysession'
# Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession':
+ if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
# TODO: re-engineer to eliminate the use of allowed_paths
# as maintenance overhead is far too high.
allowed_paths = (
@@ -443,16 +443,23 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.galaxy_session.user is None:
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
+ if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ try:
+ host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
+ except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+ host = None
+ if host in UCSC_SERVERS:
+ return
+ external_display_path = url_for( controller='dataset', action='display_application' )
+ if self.request.path.startswith( external_display_path ):
+ request_path_split = external_display_path.split( '/' )
+ try:
+ if self.app.datatypes_registry.display_applications.get( request_path_split[-5] ) and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links and request_path_split[-3] != 'None':
return
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
+ except IndexError:
+ pass
+ if self.request.path not in allowed_paths:
+ self.response.send_redirect( url_for( controller='root', action='index' ) )
def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
"""
Create a new GalaxySession for this request, possibly with a connection
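
The intent of the new block is to let an anonymous request through only when its path names a registered display application, a known link within it, and a real dataset id. A minimal sketch of that positional path check; display_applications below is a hypothetical dict standing in for Galaxy's datatypes registry, and the sketch splits the request path, which appears to be the intent of the code above:

# display_applications is a hypothetical stand-in for
# trans.app.datatypes_registry.display_applications.
display_applications = {'ucsc_main': {'links': ['browse']}}

def is_allowed_display_request(request_path):
    # Mirrors the [-5]/[-4]/[-3] indexing above; the exact route layout
    # is an assumption for illustration.
    parts = request_path.split('/')
    try:
        app_id, link_name, dataset_id = parts[-5], parts[-4], parts[-3]
    except IndexError:
        return False
    app_entry = display_applications.get(app_id)
    return bool(app_entry) and link_name in app_entry['links'] and dataset_id != 'None'

print(is_allowed_display_request('/dataset/display_application/ucsc_main/browse/123/a/b'))   # True
print(is_allowed_display_request('/dataset/display_application/ucsc_main/browse/None/a/b'))  # False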
commit/galaxy-central: greg: Refinements for installing and managing tool dependencies for tools contained in installed tool shed repositories.
by Bitbucket 22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/db2bf8004964/
changeset: db2bf8004964
user: greg
date: 2012-06-22 16:21:09
summary: Refinements for installing and managing tool dependencies for tools contained in installed tool shed repositories.
affected #: 26 files
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2647,6 +2647,12 @@
pass
class ToolShedRepository( object ):
+ installation_status = Bunch( CLONED='cloned',
+ SETTING_TOOL_VERSIONS='setting tool versions',
+ INSTALLING_TOOL_DEPENDENCIES='installing tool dependencies',
+ INSTALLED='installed',
+ ERROR='error',
+ UNINSTALLED='uninstalled' )
def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False,
uninstalled=False, dist_to_shed=False ):
@@ -2696,47 +2702,59 @@
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
- if not tool_dependency.uninstalled:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLED:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@property
def missing_tool_dependencies( self ):
"""Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
missing_dependencies = []
- # Get the dependency information from the metadata for comparison against the installed tool dependencies.
- tool_dependencies = self.metadata.get( 'tool_dependencies', None )
- if tool_dependencies:
- for dependency_key, requirements_dict in tool_dependencies.items():
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- if self.tool_dependencies:
- found = False
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.name==name and tool_dependency.version==version and tool_dependency.type==type:
- found = True
- if tool_dependency.uninstalled:
- missing_dependencies.append( ( tool_dependency.name, tool_dependency.version, tool_dependency.type ) )
- break
- if not found:
- missing_dependencies.append( ( name, version, type ) )
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status in [ ToolDependency.installation_status.NEVER_INSTALLED,
+ ToolDependency.installation_status.ERROR,
+ ToolDependency.installation_status.UNINSTALLED ]:
+ missing_dependencies.append( tool_dependency )
return missing_dependencies
@property
+ def tool_dependencies_being_installed( self ):
+ dependencies_being_installed = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+ dependencies_being_installed.append( tool_dependency )
+ return dependencies_being_installed
+ @property
+ def tool_dependencies_with_installation_errors( self ):
+ dependencies_with_installation_errors = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.ERROR:
+ dependencies_with_installation_errors.append( tool_dependency )
+ return dependencies_with_installation_errors
+ @property
def uninstalled_tool_dependencies( self ):
"""Return the repository's tool dependencies that have been uninstalled."""
uninstalled_tool_dependencies = []
for tool_dependency in self.tool_dependencies:
- if tool_dependency.uninstalled:
+ if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
uninstalled_tool_dependencies.append( tool_dependency )
return uninstalled_tool_dependencies
class ToolDependency( object ):
- def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, uninstalled=False ):
+ installation_status = Bunch( NEVER_INSTALLED='Never installed',
+ INSTALLING='Installing',
+ INSTALLED='Installed',
+ ERROR='Error',
+ UNINSTALLED='Uninstalled' )
+ states = Bunch( INSTALLING = 'running',
+ OK = 'ok',
+ ERROR = 'error',
+ UNINSTALLED = 'deleted_new' )
+ def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None ):
self.tool_shed_repository_id = tool_shed_repository_id
self.name = name
self.version = version
self.type = type
- self.uninstalled = uninstalled
+ self.status = status
+ self.error_message = error_message
def installation_directory( self, app ):
return os.path.join( app.config.tool_dependency_dir,
self.name,
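
With statuses modeled as a Bunch of strings on the class, the repository properties above become simple filters over self.tool_dependencies. A minimal sketch of that pattern; Bunch below is a small stand-in for galaxy.util.bunch.Bunch:

class Bunch(dict):
    # Minimal stand-in for galaxy.util.bunch.Bunch: dict with attr access.
    def __init__(self, **kwds):
        dict.__init__(self, kwds)
        self.__dict__ = self

installation_status = Bunch(NEVER_INSTALLED='Never installed',
                            INSTALLING='Installing',
                            INSTALLED='Installed',
                            ERROR='Error',
                            UNINSTALLED='Uninstalled')

class ToolDependency(object):
    def __init__(self, name, status):
        self.name = name
        self.status = status

deps = [ToolDependency('samtools', installation_status.INSTALLED),
        ToolDependency('bwa', installation_status.ERROR),
        ToolDependency('bowtie', installation_status.NEVER_INSTALLED)]

# Equivalent of the missing_tool_dependencies property: filter by status.
missing = [d.name for d in deps
           if d.status in (installation_status.NEVER_INSTALLED,
                           installation_status.ERROR,
                           installation_status.UNINSTALLED)]
print(missing)  # ['bwa', 'bowtie']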
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -398,7 +398,8 @@
Column( "name", TrimmedString( 255 ) ),
Column( "version", Text ),
Column( "type", TrimmedString( 40 ) ),
- Column( "uninstalled", Boolean, default=False ) )
+ Column( "status", TrimmedString( 255 ), nullable=False ),
+ Column( "error_message", TEXT ) )
ToolVersion.table = Table( "tool_version", metadata,
Column( "id", Integer, primary_key=True ),
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
@@ -0,0 +1,63 @@
+"""
+Migration script to add status and error_message columns to the tool_dependency table and drop the uninstalled column from the tool_dependency table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ col = Column( "status", TrimmedString( 255 ), nullable=False )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.status
+ except Exception, e:
+ print "Adding status column to the tool_dependency table failed: %s" % str( e )
+ col = Column( "error_message", TEXT )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.error_message
+ except Exception, e:
+ print "Adding error_message column to the tool_dependency table failed: %s" % str( e )
+ try:
+ ToolDependency_table.c.uninstalled.drop()
+ except Exception, e:
+ print "Dropping uninstalled column from the tool_dependency table failed: %s" % str( e )
+def downgrade():
+ metadata.reflect()
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ try:
+ ToolDependency_table.c.status.drop()
+ except Exception, e:
+ print "Dropping column status from the tool_dependency table failed: %s" % str( e )
+ try:
+ ToolDependency_table.c.error_message.drop()
+ except Exception, e:
+ print "Dropping column error_message from the tool_dependency table failed: %s" % str( e )
+ col = Column( "uninstalled", Boolean, default=False )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.uninstalled
+ except Exception, e:
+ print "Adding uninstalled column to the tool_dependency table failed: %s" % str( e )
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,7 +7,6 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
-from galaxy.tool_shed.migrate.common import *
REPOSITORY_OWNER = 'devteam'
@@ -177,7 +176,7 @@
status, message = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config )
- if status != 'ok' and message:
+ if status != 'done' and message:
print 'The following error occurred from the InstallManager while installing tool dependencies:'
print message
add_to_tool_panel( self.app,
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,10 +1,6 @@
import os, shutil, tarfile, urllib2
from galaxy.datatypes.checkers import *
-MISCELLANEOUS_ACTIONS = [ 'change_directory' ]
-MOVE_ACTIONS = [ 'move_directory_files', 'move_file' ]
-ALL_ACTIONS = MISCELLANEOUS_ACTIONS + MOVE_ACTIONS
-
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
# Open for reading with transparent compression.
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -13,6 +13,8 @@
from fabric.api import env, lcd, local, settings
+INSTALLATION_LOG = 'INSTALLATION.log'
+
def check_fabric_version():
version = env.version
if int( version.split( "." )[ 0 ] ) < 1:
@@ -32,16 +34,19 @@
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
-def handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=None ):
+def handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=None ):
+ sa_session = app.model.context.current
cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
- message = ''
output = local( cmd, capture=True )
- log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) )
+ log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- message = '%s %s' % ( message, str( output.stderr ) )
- return message
-def install_and_build_package( params_dict ):
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+def install_and_build_package( app, tool_dependency, params_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
+ sa_session = app.model.context.current
install_dir = params_dict[ 'install_dir' ]
download_url = params_dict.get( 'download_url', None )
clone_cmd = params_dict.get( 'clone_cmd', None )
@@ -59,43 +64,38 @@
dir = work_dir
elif clone_cmd:
output = local( clone_cmd, capture=True )
- log_results( clone_cmd, output, os.path.join( install_dir, 'clone_repository.log' ) )
+ log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- return '%s. ' % str( output.stderr )
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
dir = package_name
if actions:
with lcd( dir ):
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
for action_tup in actions:
action_key, action_dict = action_tup
- if action_key.find( 'v^v^v' ) >= 0:
- action_items = action_key.split( 'v^v^v' )
- action_name = action_items[ 0 ]
- action = action_items[ 1 ]
- elif action_key in common_util.ALL_ACTIONS:
- action_name = action_key
- else:
- action_name = None
- if action_name:
- if action_name == 'change_directory':
- current_dir = os.path.join( current_dir, action )
- lcd( current_dir )
- elif action_name == 'move_directory_files':
- common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( action_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_name == 'move_file':
- common_util.move_file( current_dir=current_dir,
- source=os.path.join( action_dict[ 'source' ] ),
- destination_dir=os.path.join( action_dict[ 'destination' ] ) )
+ if action_key == 'move_directory_files':
+ common_util.move_directory_files( current_dir=current_dir,
+ source_dir=os.path.join( action_dict[ 'source_directory' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
+ elif action_key == 'move_file':
+ common_util.move_file( current_dir=current_dir,
+ source=os.path.join( action_dict[ 'source' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination' ] ) )
else:
action = action_key
with settings( warn_only=True ):
output = local( action, capture=True )
- log_results( action, output, os.path.join( install_dir, 'actions.log' ) )
+ log_results( action, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- return '%s. ' % str( output.stderr )
- return ''
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
def log_results( command, fabric_AttributeString, file_path ):
"""
Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method)
@@ -105,12 +105,12 @@
logfile = open( file_path, 'ab' )
else:
logfile = open( file_path, 'wb' )
- logfile.write( "\n#############################################" )
- logfile.write( '\n%s\nSTDOUT\n' % command )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDOUT\n' % command )
logfile.write( str( fabric_AttributeString.stdout ) )
- logfile.write( "#############################################\n" )
- logfile.write( "\n#############################################" )
- logfile.write( '\n%s\nSTDERR\n' % command )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDERR\n' % command )
logfile.write( str( fabric_AttributeString.stderr ) )
- logfile.write( "#############################################\n" )
+ logfile.write( "\n#############################################\n" )
logfile.close()
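
The per-step log files (env_sh.log, clone_repository.log, actions.log) are folded into a single INSTALLATION.log that is appended to for every command, with stdout and stderr delimited by hash rules. A minimal sketch of that append-style logging; Result is a stand-in for the fabric _AttributeString that local() returns:

import os, tempfile

class Result(object):
    # Stand-in for fabric's _AttributeString command output.
    def __init__(self, stdout, stderr):
        self.stdout, self.stderr = stdout, stderr

def log_results(command, result, file_path):
    # Append so every command in an installation shares one log file.
    with open(file_path, 'a') as logfile:
        logfile.write('\n#############################################\n')
        logfile.write('%s\nSTDOUT\n%s' % (command, result.stdout))
        logfile.write('\n#############################################\n')
        logfile.write('%s\nSTDERR\n%s' % (command, result.stderr))
        logfile.write('\n#############################################\n')

log_results('make install', Result('built ok', ''),
            os.path.join(tempfile.gettempdir(), 'INSTALLATION.log'))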
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -11,19 +11,16 @@
from elementtree import ElementTree, ElementInclude
from elementtree.ElementTree import Element, SubElement
-def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type ):
+def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status ):
# Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
+ sa_session = app.model.context.current
# First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
- sa_session = app.model.context.current
tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
if tool_dependency:
- tool_dependency.uninstalled = False
+ tool_dependency.status = status
else:
# Create a new tool_dependency record for the tool_shed_repository.
- tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
- name=name,
- version=version,
- type=type )
+ tool_dependency = app.model.ToolDependency( tool_shed_repository.id, name, version, type, status )
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
@@ -42,14 +39,15 @@
repository.owner,
repository.name,
repository.installed_changeset_revision ) )
-def install_package( app, elem, tool_shed_repository, name=None, version=None ):
- # If name and version are not None, then a specific tool dependency is being installed.
- message = ''
+def install_package( app, elem, tool_shed_repository, tool_dependencies=None ):
+ # The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository.
+ sa_session = app.model.context.current
+ tool_dependency = None
# The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- if ( not name and not version ) or ( name and version and name==package_name and version==package_version ):
+ if tool_dependencies:
install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
if not os.path.exists( install_dir ):
for package_elem in elem:
@@ -66,26 +64,22 @@
# Handle tool dependency installation using a fabric method included in the Galaxy framework.
fabfile_path = None
for method_elem in package_elem:
- error_message = run_fabric_method( app,
- method_elem,
- fabfile_path,
- app.config.tool_dependency_dir,
- install_dir,
- package_name=package_name )
- if error_message:
- message += '%s' % error_message
- else:
- tool_dependency = create_or_update_tool_dependency( app,
- tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package' )
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.INSTALLING )
+ run_fabric_method( app, tool_dependency, method_elem, fabfile_path, install_dir, package_name=package_name )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
print package_name, 'version', package_version, 'installed in', install_dir
else:
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
- return message
-def run_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
+ return tool_dependency
+def run_fabric_method( app, tool_dependency, elem, fabfile_path, install_dir, package_name=None, **kwd ):
"""Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
+ sa_session = app.model.context.current
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
# Default value for env_dependency_path.
@@ -109,7 +103,7 @@
action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
if not action_key:
continue
- elif action_type in MOVE_ACTIONS:
+ elif action_type in [ 'move_directory_files', 'move_file' ]:
# Examples:
# <action type="move_file">
# <source>misc/some_file</source>
@@ -124,9 +118,6 @@
move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
if move_elem_text:
action_dict[ move_elem.tag ] = move_elem_text
- elif action_elem.text:
- # Example: <action type="change_directory">bin</action>
- action_key = '%sv^v^v%s' % ( action_type, action_elem.text )
else:
continue
actions.append( ( action_key, action_dict ) )
@@ -141,24 +132,36 @@
params_dict[ 'package_name' ] = package_name
if fabfile_path:
# TODO: Handle this using the fabric api.
- # run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=package_name )
+ # run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=package_name )
return 'Tool dependency installation using proprietary fabric scripts is not yet supported. '
else:
# There is currently only 1 fabric method, install_and_build_package().
try:
- message = install_and_build_package( params_dict )
- if message:
- return message
+ install_and_build_package( app, tool_dependency, params_dict )
except Exception, e:
- return '%s. ' % str( e )
- try:
- message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
- if message:
- return message
- except:
- return '%s. ' % str( e )
- return ''
-def run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( e )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ try:
+ handle_post_build_processing( app,
+ tool_dependency,
+ install_dir,
+ env_dependency_path,
+ package_name=package_name )
+ except Exception, e:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( e )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+def run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
@@ -193,7 +196,7 @@
return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
if returncode:
return message
- message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
+ message = handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
else:
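
The thread running through install_util.py is a small state machine: a ToolDependency is flipped to INSTALLING before any work starts, every failure path records ERROR plus the message, and only a clean run ends at INSTALLED, with the session flushed at each transition. A minimal sketch of that flow; FakeSession stands in for the SQLAlchemy scoped session and build_step for the fabric build:

class FakeSession(object):
    # Stand-in for the SQLAlchemy scoped session used above.
    def add(self, obj): pass
    def flush(self): pass

class ToolDependency(object):
    def __init__(self, name):
        self.name = name
        self.status = 'Never installed'
        self.error_message = None

def install_package(sa_session, dependency, build_step):
    # Mark INSTALLING up front so concurrent viewers see work in progress.
    dependency.status = 'Installing'
    sa_session.add(dependency)
    sa_session.flush()
    try:
        build_step()
    except Exception as e:
        # Any failure records ERROR plus the message for the admin UI.
        dependency.status = 'Error'
        dependency.error_message = str(e)
    else:
        dependency.status = 'Installed'
    sa_session.add(dependency)
    sa_session.flush()
    return dependency

dep = install_package(FakeSession(), ToolDependency('samtools'),
                      lambda: None)  # no-op stand-in for the fabric build
print(dep.status)  # Installed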
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -6,7 +6,7 @@
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
-from galaxy.tool_shed.tool_dependencies.install_util import install_package
+from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package
from galaxy.tool_shed.encoding_util import *
from galaxy.model.orm import *
@@ -318,8 +318,8 @@
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
if current_changeset_revision is None:
# The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository
- # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior to
- # it being uninstalled.
+ # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior
+ # to it being uninstalled.
current_changeset_revision = installed_changeset_revision
sa_session = app.model.context.current
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
@@ -353,6 +353,37 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
+def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision ):
+ # Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
+ tool_dependency_objects = []
+ work_dir = make_tmp_directory()
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = get_config_from_repository( app,
+ 'tool_dependencies.xml',
+ tool_shed_repository,
+ current_changeset_revision,
+ work_dir )
+ tree = ElementTree.parse( tool_dependencies_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ fabric_version_checked = False
+ for elem in root:
+ if elem.tag == 'package':
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.NEVER_INSTALLED )
+ tool_dependency_objects.append( tool_dependency )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ return tool_dependency_objects
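As a sketch of the input this new function expects, a tool_dependencies.xml along the following lines yields one ToolDependency record per <package> tag; the package names and versions here are purely illustrative:

from xml.etree import ElementTree

# Illustrative config; create_tool_dependency_objects() only reads the
# name/version attributes of top-level <package> tags.
TOOL_DEPENDENCIES_XML = """\
<tool_dependency>
    <package name="samtools" version="0.1.18" />
    <package name="bwa" version="0.5.9" />
</tool_dependency>
"""

root = ElementTree.fromstring( TOOL_DEPENDENCIES_XML )
for elem in root:
    if elem.tag == 'package':
        package_name = elem.get( 'name', None )
        package_version = elem.get( 'version', None )
        if package_name and package_version:
            # The real code calls create_or_update_tool_dependency() here
            # with status NEVER_INSTALLED.
            print( '%s version %s' % ( package_name, package_version ) )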
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
@@ -1163,7 +1194,7 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, name=None, version=None, type='package' ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies=None ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
@@ -1171,7 +1202,7 @@
will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
- status = 'ok'
+ status = 'done'
message = ''
# Parse the tool_dependencies.xml config.
tree = ElementTree.parse( tool_dependencies_config )
@@ -1179,12 +1210,24 @@
ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
- if elem.tag == type:
- error_message = install_package( app, elem, tool_shed_repository, name=name, version=version )
- if error_message:
- message += ' %s' % error_message
- if message:
- status = 'error'
+ if elem.tag == 'package':
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ can_install = True
+ if tool_dependencies:
+ # Only install the package if it is not already installed.
+ can_install = False
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.name==package_name and tool_dependency.version==package_version:
+ can_install = tool_dependency.status in [ app.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ app.model.ToolDependency.installation_status.UNINSTALLED ]
+ break
+ if can_install:
+ tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
+ if tool_dependency and tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
+ message = tool_dependency.error_message
+ status = 'error'
return status, message
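The skip-already-installed logic above reduces to a small predicate; a sketch under the assumption that each record exposes name, version, and status as in the loop, where installable_statuses would be [ NEVER_INSTALLED, UNINSTALLED ] in handle_tool_dependencies():

def can_install_package( package_name, package_version, tool_dependencies, installable_statuses ):
    """Mirror of the can_install decision in handle_tool_dependencies()."""
    if not tool_dependencies:
        # No tracking records were passed, so install unconditionally.
        return True
    for tool_dependency in tool_dependencies:
        if tool_dependency.name == package_name and tool_dependency.version == package_version:
            # Install only if the matching record is in an installable state.
            return tool_dependency.status in installable_statuses
    # Records were passed but none matches this package, so skip it.
    return False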
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
@@ -1247,13 +1290,11 @@
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
def load_repository_contents( trans, repository_name, description, owner, installed_changeset_revision, current_changeset_revision, ctx_rev,
- tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None,
- install_tool_dependencies=False ):
+ tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None ):
"""
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- message = ''
metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
# Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked deleted, undelete it. This
# must happen before the call to add_to_tool_panel() below because tools will not be properly loaded if the repository is marked deleted.
@@ -1285,20 +1326,6 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied )
- if install_tool_dependencies and 'tool_dependencies' in metadata_dict:
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- current_changeset_revision,
- work_dir )
- # Install dependencies for repository tools.
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config )
- if status != 'ok' and message:
- print 'The following error occurred from load_repository_contents while installing tool dependencies:'
- print message
add_to_tool_panel( app=trans.app,
repository_name=repository_name,
repository_clone_url=repository_clone_url,
@@ -1340,7 +1367,9 @@
shutil.rmtree( work_dir )
except:
pass
- return tool_shed_repository, metadata_dict, message
+ if 'tool_dependencies' in metadata_dict:
+ tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, current_changeset_revision )
+ return tool_shed_repository, metadata_dict
def make_tmp_directory():
tmp_dir = os.getenv( 'TMPDIR', '' )
if tmp_dir:
@@ -1505,7 +1534,7 @@
error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
log.debug( error_message )
if removed:
- tool_dependency.uninstalled = True
+ tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -77,9 +77,71 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class )
+class ToolDependencyGrid( grids.Grid ):
+ class NameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.name
+ class VersionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.version
+ class TypeColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.type
+ class StatusColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLING ]:
+ return tool_dependency.status
+ else:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ bgcolor = trans.model.ToolDependency.states.UNINSTALLED
+ elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.ERROR ]:
+ bgcolor = trans.model.ToolDependency.states.ERROR
+ elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ bgcolor = trans.model.ToolDependency.states.OK
+ rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">' % ( bgcolor, trans.security.encode_id( tool_dependency.id ) )
+ rval += '%s</div>' % tool_dependency.status
+ return rval
+
+ webapp = "galaxy"
+ title = "Tool Dependencies"
+ template = "admin/tool_shed_repository/tool_dependencies_grid.mako"
+ model_class = model.ToolDependency
+ default_sort_key = "-create_time"
+ num_rows_per_page = 50
+ preserve_state = True
+ use_paging = False
+ columns = [
+ NameColumn( "Name",
+ filterable="advanced" ),
+ VersionColumn( "Version",
+ filterable="advanced" ),
+ TypeColumn( "Type",
+ filterable="advanced" ),
+ StatusColumn( "Installation Status",
+ filterable="advanced" ),
+ ]
+ operations = [
+ grids.GridOperation( "Install",
+ allow_multiple=True,
+ condition=( lambda item: item.status in [ model.ToolDependency.installation_status.NEVER_INSTALLED,
+ model.ToolDependency.installation_status.UNINSTALLED ] ) ),
+ grids.GridOperation( "Uninstall",
+ allow_multiple=True,
+ allow_popup=False,
+ condition=( lambda item: item.status in [ model.ToolDependency.installation_status.INSTALLED,
+ model.ToolDependency.installation_status.ERROR ] ) )
+ ]
+ def apply_query_filter( self, trans, query, **kwd ):
+ tool_dependency_id = kwd.get( 'tool_dependency_id', None )
+ if not tool_dependency_id:
+ return query
+ return query.filter_by( tool_dependency_id=trans.security.decode_id( tool_dependency_id ) )
+
class AdminToolshed( AdminGalaxy ):
repository_list_grid = RepositoryListGrid()
+ tool_dependency_grid = ToolDependencyGrid()
@web.expose
@web.require_admin
@@ -153,9 +215,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
- repository = get_repository( trans, kwd[ 'repository_id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_tool_dependency.mako',
- repository=repository,
+ repository=tool_dependency.tool_shed_repository,
tool_dependency=tool_dependency,
message=message,
status=status )
@@ -187,6 +248,35 @@
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
+ def confirm_tool_dependency_install( self, trans, **kwd ):
+ """Display a page enabling the Galaxy administrator to choose to install tool dependencies for a tool shed repository they are installing."""
+ # This method is called from the tool shed (never Galaxy) when a tool shed repository that includes a file named tool_dependencies.xml
+ # is being installed into a local Galaxy instance.
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ tool_shed_url = kwd[ 'tool_shed_url' ]
+ repo_info_dict = kwd[ 'repo_info_dict' ]
+ includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+ # Decode the encoded repo_info_dict param value.
+ dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
+ # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
+ new_repo_info_dict = {}
+ for name, repo_info_tuple in dict_with_tool_dependencies.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ # Create a new repo_info_dict by eliminating tool dependencies from the repo_info_tuple.
+ new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
+ repo_info_dict = tool_shed_encode( new_repo_info_dict )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ return trans.fill_template( '/admin/tool_shed_repository/confirm_tool_dependency_install.mako',
+ tool_shed_url=tool_shed_url,
+ repo_info_dict=repo_info_dict,
+ dict_with_tool_dependencies=dict_with_tool_dependencies,
+ includes_tools=includes_tools,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -270,71 +360,33 @@
return get_repository_file_contents( file_path )
@web.expose
@web.require_admin
- def install_missing_tool_dependencies( self, trans, **kwd ):
- """
- Install dependencies for tools included in the repository that were not installed when the repository was installed or that are
- being reinstalled after the repository was uninstalled.
- """
- reinstalling = util.string_as_bool( kwd.get( 'reinstalling', False ) )
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
- install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
- if not reinstalling and install_tool_dependencies and kwd.get( 'install_missing_tool_dependencies_button', False ):
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- # Get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=repository,
- tool_dependencies_config=tool_dependencies_config )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
- repository=repository,
- description=repository.description,
- repo_files_dir=repo_files_dir,
- message=message,
- status=status )
- if reinstalling and kwd.get( 'install_missing_tool_dependencies_button', False ):
- # The user has been presented the option to install tool dependencies, so redirect to reinstall the repository, sending
- # along the user's choice.
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reinstall_repository',
- **kwd ) )
- tool_dependencies = repository.metadata[ 'tool_dependencies' ]
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
- if not reinstalling:
- # Filter the tool_dependencies dictionary to eliminate successfully installed dependencies.
- filtered_tool_dependencies = {}
- for missing_dependency_tup in repository.missing_tool_dependencies:
- name, version, type = missing_dependency_tup
- dependency_key = '%s/%s' % ( name, version )
- install_dir = get_tool_dependency_install_dir( trans.app, repository, name, version )
- filtered_tool_dependencies[ dependency_key ] = dict( name=name, type=type, version=version )
- tool_dependencies = filtered_tool_dependencies
- no_changes = kwd.get( 'no_changes', '' )
- no_changes_checked = CheckboxField.is_checked( no_changes )
- new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
- tool_panel_section = kwd.get( 'tool_panel_section', '' )
- return trans.fill_template( '/admin/tool_shed_repository/install_missing_tool_dependencies.mako',
- repository=repository,
- reinstalling=reinstalling,
- tool_dependencies=tool_dependencies,
- no_changes_checked=no_changes_checked,
- new_tool_panel_section=new_tool_panel_section,
- tool_panel_section=tool_panel_section,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
+ def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
+ """Install specified dependencies for repository tools."""
+ # Get the tool_shed_repository from one of the tool_dependencies.
+ tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
+ work_dir = make_tmp_directory()
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = get_config_from_repository( trans.app,
+ 'tool_dependencies.xml',
+ tool_shed_repository,
+ tool_shed_repository.changeset_revision,
+ work_dir )
+ status, message = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
+ if not message:
+ message = "Installed tool dependencies: %s" % ','.join( td.name for td in tool_dependencies )
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ message=message,
+ status=status ) )
@web.expose
@web.require_admin
def install_repository( self, trans, **kwd ):
@@ -413,23 +465,19 @@
clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
- repository_name=name,
- description=description,
- owner=owner,
- installed_changeset_revision=changeset_revision,
- current_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- install_tool_dependencies=install_tool_dependencies )
- if error_message:
- message += error_message
- status = 'error'
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=name,
+ description=description,
+ owner=owner,
+ installed_changeset_revision=changeset_revision,
+ current_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%srepository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \
@@ -449,26 +497,28 @@
if installed_repository_names:
installed_repository_names.sort()
num_repositories_installed = len( installed_repository_names )
- if install_tool_dependencies:
- dependency_str = ' along with tool dependencies '
- else:
- dependency_str = ''
if tool_section:
- message += 'Installed %d %s%sand all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
+ message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
( num_repositories_installed,
inflector.cond_plural( num_repositories_installed, 'repository' ),
- dependency_str,
tool_section.name )
else:
- message += 'Installed %d %s%s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \
+ message += 'Installed %d %s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \
( num_repositories_installed,
- inflector.cond_plural( num_repositories_installed, 'repository' ),
- dependency_str )
+ inflector.cond_plural( num_repositories_installed, 'repository' ) )
for i, repo_name in enumerate( installed_repository_names ):
if i == len( installed_repository_names ) -1:
message += '%s.<br/>' % repo_name
else:
message += '%s, ' % repo_name
+ if install_tool_dependencies:
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.missing_tool_dependencies ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ operation='install',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
@@ -520,61 +570,42 @@
@web.expose
@web.require_admin
def install_tool_dependencies( self, trans, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- tool_shed_url = kwd[ 'tool_shed_url' ]
- repo_info_dict = kwd[ 'repo_info_dict' ]
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
- # Decode the encoded repo_info_dict param value.
- dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
- # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
- new_repo_info_dict = {}
- for name, repo_info_tuple in dict_with_tool_dependencies.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
- new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
- repo_info_dict = tool_shed_encode( new_repo_info_dict )
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ tool_dependencies = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependencies.append( tool_dependency )
+ if kwd.get( 'install_tool_dependencies_button', False ):
+ # Filter tool dependencies to only those that can be installed (uninstalled or in an error state).
+ tool_dependencies_for_installation = []
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.UNINSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_installation.append( tool_dependency )
+ if tool_dependencies_for_installation:
+ # Redirect back to the ToolDependencyGrid before initiating installation.
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies_for_installation ]
+ new_kwd = dict( action='manage_tool_dependencies',
+ operation='initiate_tool_dependency_installation',
+ tool_dependency_ids=tool_dependency_ids,
+ message=message,
+ status=status )
+ return self.tool_dependency_grid( trans, **new_kwd )
+ else:
+ message = 'All of the selected tool dependencies are already installed.'
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_shed_url=tool_shed_url,
- repo_info_dict=repo_info_dict,
- dict_with_tool_dependencies=dict_with_tool_dependencies,
- includes_tools=includes_tools,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def install_tool_dependency( self, trans, name, version, type, repository_id, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository = get_repository( trans, repository_id )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- # Get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=repository,
- tool_dependencies_config=tool_dependencies_config,
- name=name,
- version=version,
- type=type )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
- repository=repository,
- description=repository.description,
- repo_files_dir=repo_files_dir,
+ tool_dependencies=tool_dependencies,
message=message,
status=status )
@web.expose
@@ -614,12 +645,79 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository_id = kwd[ 'id' ]
- repository = get_repository( trans, repository_id )
- return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
- repository=repository,
- message=message,
- status=status )
+ tool_dependency_id = params.get( 'tool_dependency_id', None )
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ if tool_dependency_id and tool_dependency_id not in tool_dependency_ids:
+ tool_dependency_ids.append( tool_dependency_id )
+ tool_dependencies = []
+ # We need a tool_shed_repository, so get it from one of the tool_dependencies.
+ tool_dependency = get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name
+ self.tool_dependency_grid.global_actions = \
+ [ grids.GridAction( label='Browse repository',
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Manage repository',
+ url_args=dict( controller='admin_toolshed',
+ action='manage_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Get updates',
+ url_args=dict( controller='admin_toolshed',
+ action='check_for_updates',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Set tool versions',
+ url_args=dict( controller='admin_toolshed',
+ action='set_tool_versions',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Deactivate or uninstall repository',
+ url_args=dict( controller='admin_toolshed',
+ action='deactivate_or_uninstall_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ) ]
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if not tool_dependency_ids:
+ message = 'Select at least 1 tool dependency to %s.' % operation
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'error'
+ del kwd[ 'operation' ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ **kwd ) )
+ if operation == 'browse':
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_tool_dependency',
+ **kwd ) )
+ elif operation == 'uninstall':
+ tool_dependencies_for_uninstallation = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_uninstallation.append( tool_dependency )
+ if tool_dependencies_for_uninstallation:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='uninstall_tool_dependencies',
+ **kwd ) )
+ else:
+ kwd[ 'message' ] = 'All selected tool dependencies are already uninstalled.'
+ kwd[ 'status' ] = 'error'
+ elif operation == "install":
+ tool_dependencies_for_installation = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ tool_dependencies_for_installation.append( tool_dependency )
+ if tool_dependencies_for_installation:
+ self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ else:
+ kwd[ 'message' ] = 'All selected tool dependencies are already installed.'
+ kwd[ 'status' ] = 'error'
+ return self.tool_dependency_grid( trans, **kwd )
@web.json
@web.require_admin
def open_folder( self, trans, folder_path ):
@@ -710,39 +808,52 @@
tool_section = trans.app.toolbox.tool_panel[ section_key ]
else:
tool_section = None
- tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- installed_changeset_revision=repository.installed_changeset_revision,
- current_changeset_revision=current_changeset_revision,
- ctx_rev=ctx_rev,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- install_tool_dependencies=install_tool_dependencies )
- if error_message:
- # We'll only have an error_message if there was a problem installing tool dependencies.
- message += error_message
- status = 'error'
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ installed_changeset_revision=repository.installed_changeset_revision,
+ current_changeset_revision=current_changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
repository.uninstalled = False
repository.deleted = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message += 'The <b>%s</b> repository has been reinstalled. ' % repository.name
if install_tool_dependencies:
- dependency_str = ' along with tool dependencies'
- if error_message:
- dependency_str += ', but with some errors installing the dependencies'
- else:
- dependency_str = ' without tool dependencies'
- message += 'The <b>%s</b> repository has been reinstalled%s. ' % ( repository.name, dependency_str )
+ message += 'The following tool dependencies are now being installed, please wait...'
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.missing_tool_dependencies ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ operation='install',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
status=status ) )
+ @web.json
+ def repository_installation_status_updates( self, trans, id=None, status=None ):
+ # Avoid caching
+ trans.response.headers[ 'Pragma' ] = 'no-cache'
+ trans.response.headers[ 'Expires' ] = '0'
+ # Create new HTML for any that have changed
+ rval = {}
+ if id is not None and status is not None:
+ repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+ if repository.status != status:
+ repository.status = status
+ rval[ id ] = { "status": repository.status,
+ "html_status": unicode( trans.fill_template( "admin/tool_shed_repository/repository_installation_status.mako",
+ repository=repository ),
+ 'utf-8' ) }
+ return rval
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
@@ -811,22 +922,62 @@
repo_files_dir=repo_files_dir,
message=message,
status=status )
+ @web.json
+ def tool_dependency_status_updates( self, trans, ids=None, status_list=None ):
+ # Avoid caching
+ trans.response.headers[ 'Pragma' ] = 'no-cache'
+ trans.response.headers[ 'Expires' ] = '0'
+ # Create new HTML for any that have changed
+ rval = {}
+ if ids is not None and status_list is not None:
+ ids = ids.split( "," )
+ status_list = status_list.split( "," )
+ for id, status in zip( ids, status_list ):
+ tool_dependency = trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
+ if tool_dependency.status != status:
+ rval[ id ] = { "status": tool_dependency.status,
+ "html_status": unicode( trans.fill_template( "admin/tool_shed_repository/tool_dependency_installation_status.mako",
+ tool_dependency=tool_dependency ),
+ 'utf-8' ) }
+ return rval
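The two @web.json methods above share a simple polling contract: the page posts comma-separated ids plus the statuses it last rendered, and the server answers only for records whose status has since changed, attaching freshly rendered HTML for each. A sketch of the diffing step, with a plain dictionary standing in for the SQLAlchemy lookup and the Mako rendering omitted:

def changed_statuses( current_status_by_id, ids, status_list ):
    """Return { id: current_status } for every id whose status differs from the client's copy."""
    rval = {}
    for id, last_seen in zip( ids.split( ',' ), status_list.split( ',' ) ):
        current = current_status_by_id.get( id )
        if current is not None and current != last_seen:
            rval[ id ] = current
    return rval

# For example, if dependency 'a1' finished installing since the last poll:
# changed_statuses( { 'a1': 'Installed', 'b2': 'Installing' }, 'a1,b2', 'Installing,Installing' )
# returns { 'a1': 'Installed' }; the JavaScript updater then swaps in the new HTML.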
@web.expose
@web.require_admin
- def uninstall_tool_dependency( self, trans, **kwd ):
+ def uninstall_tool_dependencies( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'repository_id' ] )
- tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
- if uninstalled:
- message = "The '%s' tool dependency has been uninstalled." % tool_dependency.name
- else:
- message = "Error attempting to uninstall the '%s' tool dependency: %s" % ( tool_dependency.name, error_message )
- status = 'error'
- return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
- repository=repository,
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ tool_dependencies = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependencies.append( tool_dependency )
+ if kwd.get( 'uninstall_tool_dependencies_button', False ):
+ errors = False
+ # Filter tool dependencies to only those that can be uninstalled (installed or in an error state).
+ tool_dependencies_for_uninstallation = []
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_uninstallation.append( tool_dependency )
+ for tool_dependency in tool_dependencies_for_uninstallation:
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if error_message:
+ errors = True
+ message = '%s %s' % ( message, error_message )
+ if errors:
+ message = "Error attempting to uninstall tool dependencies: %s" % message
+ status = 'error'
+ else:
+ message = "These tool dependencies have been uninstalled: %s" % ','.join( td.name for td in tool_dependencies_for_uninstallation )
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
+ return trans.fill_template( '/admin/tool_shed_repository/uninstall_tool_dependencies.mako',
+ tool_dependencies=tool_dependencies,
message=message,
status=status )
@web.expose
@@ -864,11 +1015,14 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ # Create tool_dependency records if necessary.
+ if 'tool_dependencies' in metadata_dict:
+ tool_dependencies = create_tool_dependency_objects( trans.app, repository, repository.changeset_revision )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
- message += "Select <b>Install tool dependencies</b> from the repository's pop-up menu to install tool dependencies."
+ message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
message = "The directory containing the installed repository named '%s' cannot be found. " % name
status = 'error'
@@ -951,3 +1105,6 @@
def get_repository( trans, id ):
"""Get a tool_shed_repository from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+def get_tool_dependency( trans, id ):
+ """Get a tool_dependency from the database via id"""
+ return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py
+++ b/lib/galaxy/web/controllers/tool_runner.py
@@ -128,18 +128,17 @@
else:
tool_id_select_field = None
tool = tools[ 0 ]
- if tool.id == job.tool_id and tool.version == job.tool_version:
+ if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version:
tool_id_version_message = ''
elif tool.id == job.tool_id:
if job.tool_version == None:
# For some reason jobs don't always keep track of the tool version.
tool_id_version_message = ''
else:
+ tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
if len( tools ) > 1:
- tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
else:
- tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
else:
if len( tools ) > 1:
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1229,7 +1229,7 @@
encoded_repo_info_dict = encode( repo_info_dict )
if includes_tool_dependencies:
# Redirect back to local Galaxy to present the option to install tool dependencies.
- url = '%sadmin_toolshed/install_tool_dependencies?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
+ url = '%sadmin_toolshed/confirm_tool_dependency_install?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
else:
# Redirect back to local Galaxy to perform install.
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -10,7 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "ui.core", "jquery.dynatree" )}
- ${common_javascripts(repository.name, repository.repo_files_directory(trans.app))}
+ ${browse_files(repository.name, repository.repo_files_directory(trans.app))}
</%def><br/><br/>
@@ -21,10 +21,8 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
%if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a>
%endif
</div></ul>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- a/templates/admin/tool_shed_repository/browse_tool_dependency.mako
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -10,9 +10,11 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "ui.core", "jquery.dynatree" )}
- ${common_javascripts(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
+ ${browse_files(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
</%def>
+<% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+
<br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="tool_dependency-${tool_dependency.id}-popup" class="menubutton">Repository Actions</a></li>
@@ -21,8 +23,7 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this tool dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a></div></ul>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -1,4 +1,4 @@
-<%def name="common_javascripts(title_text, directory_path)">
+<%def name="browse_files(title_text, directory_path)"><script type="text/javascript">
$(function(){
$("#tree").ajaxComplete(function(event, XMLHttpRequest, ajaxOptions) {
@@ -66,3 +66,131 @@
});
</script></%def>
+
+<%def name="dependency_status_updater()">
+ <script type="text/javascript">
+
+ // Tool dependency status updater - used to update the installation status on the Tool Dependencies grid.
+ // Looks for changes in tool dependency installation status using an async request. Keeps calling itself
+ // (via setTimeout) until dependency installation status is neither 'Installing' nor 'Building'.
+ var tool_dependency_status_updater = function ( dependency_status_list ) {
+ // See if there are any items left to track
+ var empty = true;
+ for ( i in dependency_status_list ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ setTimeout( function() { tool_dependency_status_updater_callback( dependency_status_list ) }, 3000 );
+ }
+ };
+ var tool_dependency_status_updater_callback = function ( dependency_status_list ) {
+ var ids = []
+ var status_list = []
+ $.each( dependency_status_list, function ( id, dependency_status ) {
+ ids.push( id );
+ status_list.push( dependency_status );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='admin_toolshed', action='tool_dependency_status_updates' )}",
+ dataType: "json",
+ data: { ids: ids.join( "," ), status_list: status_list.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var cell1 = $("#ToolDependencyStatus-" + id);
+ cell1.html( val.html_status );
+ dependency_status_list[ id ] = val.status;
+ });
+ tool_dependency_status_updater( dependency_status_list );
+ },
+ error: function() {
+ tool_dependency_status_updater( dependency_status_list );
+ }
+ });
+ };
+ </script>
+</%def>
+
+<%def name="tool_dependency_installation_updater()">
+ <%
+ can_update = False
+ if query.count():
+ # Get the first tool dependency to get to the tool shed repository.
+ tool_dependency = query[0]
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ can_update = tool_shed_repository.tool_dependencies_being_installed or tool_shed_repository.missing_tool_dependencies
+ %>
+ %if can_update:
+ <script type="text/javascript">
+ // Tool dependency installation status updater
+ tool_dependency_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( td.id ), td.status ) for td in query ] ) }});
+ </script>
+ %endif
+</%def>
+
+<%def name="repository_installation_status_updater()">
+ <script type="text/javascript">
+
+ // Tool shed repository status updater - used to update the installation status on the repository_installation.mako template.
+ // Looks for changes in repository installation status using an async request. Keeps calling itself (via setTimeout) until
+ // repository installation status is neither 'cloning', 'cloned' nor 'installing tool dependencies'.
+ var tool_shed_repository_status_updater = function ( repository_status_list ) {
+ // See if there are any items left to track
+ var empty = true;
+ for ( i in repository_status_list ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ setTimeout( function() { tool_shed_repository_status_updater_callback( repository_status_list ) }, 3000 );
+ }
+ };
+ var tool_shed_repository_status_updater_callback = function ( repository_status_list ) {
+ var ids = []
+ var status_list = []
+ $.each( repository_status_list, function ( id, repository_status ) {
+ ids.push( id );
+ status_list.push( repository_status );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='admin_toolshed', action='repository_installation_status_updates' )}",
+ dataType: "json",
+ data: { id: ids[0], status_list: status_list.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var cell1 = $("#RepositoryStatus-" + id);
+ cell1.html( val.html_status );
+ repository_status_list[ id ] = val.status;
+ });
+ tool_shed_repository_status_updater( repository_status_list );
+ },
+ error: function() {
+ tool_shed_repository_status_updater( repository_status_list );
+ }
+ });
+ };
+ </script>
+</%def>
+
+<%def name="repository_installation_updater()">
+ <%
+ can_update = True
+ if tool_shed_repository:
+ can_update = tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
+ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]
+ %>
+ %if can_update:
+ <script type="text/javascript">
+ // Tool shed repository installation status updater
+ repository_installation_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( repository.id ), repository.status ) for repository in query ] ) }});
+ </script>
+ %endif
+</%def>
+
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/confirm_tool_dependency_install.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/confirm_tool_dependency_install.mako
@@ -0,0 +1,86 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+ <p>
+ The tool dependencies listed below can be automatically installed with the repository. Installing them provides significant
+ benefits and Galaxy includes various features to manage them.
+ </p>
+ <p>
+ Each of these dependencies may require its own build requirements (e.g., CMake, g++, etc.). Galaxy will not attempt to install
+ these build requirements, so if any are missing from your environment, tool dependency installation may partially fail. The
+ repository and all of its contents will be installed in any case.
+ </p>
+ <p>
+ If tool dependency installation fails in any way, you can install the missing build requirements and have Galaxy attempt to install
+ the tool dependencies again using the <b>Install tool dependencies</b> pop-up menu option on the <b>Manage repository</b> page.
+ </p>
+</div>
+
+<div class="toolForm">
+ <div class="toolFormBody">
+ <form name="confirm_tool_dependency_install" id="confirm_tool_dependency_install" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" >
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Install tool dependencies?</label>
+ ${install_tool_dependencies_check_box.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Un-check to skip automatic installation of these tool dependencies.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Type</th>
+ <th>Install directory</th>
+ </tr>
+ %for repository_name, repo_info_tuple in dict_with_tool_dependencies.items():
+ <%
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ %>
+ %for dependency_key, requirements_dict in tool_dependencies.items():
+ <%
+ name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
+ type = requirements_dict[ 'type' ]
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ name,
+ version,
+ repository_owner,
+ repository_name,
+ changeset_revision )
+ readme_text = requirements_dict.get( 'readme', None )
+ %>
+ %if not os.path.exists( install_dir ):
+ <tr>
+ <td>${name}</td>
+ <td>${version}</td>
+ <td>${type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %if readme_text:
+ <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
+ <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
+ %endif
+ %endif
+ %endfor
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="confirm_tool_dependency_install_button" value="Continue"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
--- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -86,7 +86,7 @@
* The repository's installed tool dependencies will be removed from disk.
</div><div class="toolParamHelp" style="clear: both;">
- * Each associated tool dependency record's uninstalled column in the tool_dependency database table will be set to True.
+ * Each associated tool dependency record's status column in the tool_dependency database table will be set to 'Uninstalled'.
</div>
%endif
%if repository.includes_datatypes:
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
+++ /dev/null
@@ -1,109 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-
-<% import os %>
-
-<br/><br/>
-<ul class="manage-table-actions">
- <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
- <div popupmenu="repository-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- %if repository.includes_tools:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
- %endif
- %if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
- </div>
-</ul>
-
-%if message:
- ${render_msg( message, status )}
-%endif
-
-<div class="warningmessage">
- <p>
- Galaxy will attempt to install the missing tool dependencies listed below. Each of these dependencies may require their own build
- requirements (e.g., CMake, g++, etc). Galaxy will not attempt to install these build requirements, so if any are missing from your
- environment tool dependency installation may partially fail. If this happens, you can install the missing build requirements and
- have Galaxy attempt to install the tool dependencies again.
- </p>
-</div>
-<br/>
-<div class="warningmessage">
- <p>
- Installation may take a while. <b>Always wait until a message is displayed in your browser after clicking the <b>Go</b> button below.</b>
- If you get bored, watching your Galaxy server's paster log will help pass the time.
- </p>
- <p>
- Information about the tool dependency installation process will be saved in various files named with a ".log" extension in the directory:
- ${trans.app.config.tool_dependency_dir}/<i>package name</i>/<i>package version</i>/${repository.owner}/${repository.name}/${repository.changeset_revision}
- </p>
-</div>
-<br/>
-
-<div class="toolForm">
- <div class="toolFormBody">
- <form name="install_missing_tool_dependencies" id="install_missing_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), tool_panel_section=tool_panel_section, new_tool_panel_section=new_tool_panel_section, reinstalling=reinstalling )}" method="post" >
- <div style="clear: both"></div>
- <div class="form-row">
- <label>Install missing tool dependencies?</label>
- ${install_tool_dependencies_check_box.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Un-check to skip installation of these missing tool dependencies.
- </div>
- ## Fake the no_changes_check_box value.
- %if no_changes_checked:
- <input type="hidden" id="no_changes" name="no_changes" value="true" checked="checked"><input type="hidden" name="no_changes" value="true">
- %else:
- <input type="hidden" name="no_changes" value="true">
- %endif
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr><td colspan="4" bgcolor="#D8D8D8"><b>Missing tool dependencies</b></td></tr>
- <tr>
- <th>Name</th>
- <th>Version</th>
- <th>Type</th>
- <th>Install directory</th>
- </tr>
- %for dependency_key, requirements_dict in tool_dependencies.items():
- <%
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- install_dir = os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository.owner,
- repository.name,
- repository.changeset_revision )
- readme_text = requirements_dict.get( 'readme', None )
- %>
- %if not os.path.exists( install_dir ):
- <tr>
- <td>${name}</td>
- <td>${version}</td>
- <td>${type}</td>
- <td>${install_dir}</td>
- </tr>
- %if readme_text:
- <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
- <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
- %endif
- %endif
- %endfor
- </table>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="install_missing_tool_dependencies_button" value="Go"/>
- </div>
- </form>
- </div>
-</div>
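
For reference, the per-dependency installation directory described in the removed template above follows a fixed layout under Galaxy's tool_dependency_dir. A minimal Python sketch of that path construction; every concrete value here is a hypothetical example:

import os

# Hypothetical values; the real ones come from the tool dependency
# definition and the installed repository record.
tool_dependency_dir = '/galaxy/tool_dependencies'
name, version = 'samtools', '0.1.18'
owner, repo_name, changeset_revision = 'devteam', 'samtools', 'abc123'

# <tool_dependency_dir>/<package name>/<package version>/<owner>/<repo>/<changeset>
install_dir = os.path.join( tool_dependency_dir, name, version,
                            owner, repo_name, changeset_revision )
# -> /galaxy/tool_dependencies/samtools/0.1.18/devteam/samtools/abc123
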
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/install_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies.mako
@@ -25,16 +25,7 @@
<div class="toolForm"><div class="toolFormBody">
- <form name="install_tool_dependenceies" id="install_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" >
- <div style="clear: both"></div>
- <div class="form-row">
- <label>Install tool dependencies?</label>
- ${install_tool_dependencies_check_box.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Un-check to skip automatic installation of these tool dependencies.
- </div>
- </div>
- <div style="clear: both"></div>
+ <form name="install_tool_dependenceies" id="install_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='install_tool_dependencies' )}" method="post" ><div class="form-row"><table class="grid"><tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
@@ -44,42 +35,46 @@
<th>Type</th><th>Install directory</th></tr>
- %for repository_name, repo_info_tuple in dict_with_tool_dependencies.items():
+ <% tool_shed_repository = None %>
+ %for tool_dependency in tool_dependencies:
+ <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/><%
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ readme_text = None
+ if tool_shed_repository is None:
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ metadata = tool_shed_repository.metadata
+ tool_dependencies_dict = metadata[ 'tool_dependencies' ]
+ for key, requirements_dict in tool_dependencies_dict.items():
+ key_items = key.split( '/' )
+ key_name = key_items[ 0 ]
+ key_version = key_items[ 1 ]
+ if key_name == tool_dependency.name and key_version == tool_dependency.version:
+ readme_text = requirements_dict.get( 'readme', None )
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ tool_dependency.name,
+ tool_dependency.version,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision )
%>
- %for dependency_key, requirements_dict in tool_dependencies.items():
- <%
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- install_dir = os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository_owner,
- repository_name,
- changeset_revision )
- readme_text = requirements_dict.get( 'readme', None )
- %>
- %if not os.path.exists( install_dir ):
- <tr>
- <td>${name}</td>
- <td>${version}</td>
- <td>${type}</td>
- <td>${install_dir}</td>
- </tr>
- %if readme_text:
- <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
- <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
- %endif
+ %if not os.path.exists( install_dir ):
+ <tr>
+ <td>${tool_dependency.name}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %if readme_text:
+ <tr><td colspan="4" bgcolor="#FFFFCC">${tool_dependency.name} ${tool_dependency.version} requirements and installation information</td></tr>
+ <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
- %endfor
+ %endif
%endfor
</table><div style="clear: both"></div></div><div class="form-row">
- <input type="submit" name="install_tool_dependenceies_button" value="Continue"/>
+ <input type="submit" name="install_tool_dependencies_button" value="Install"/></div></form></div>
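
The reworked template above finds a dependency's readme by scanning the repository metadata, whose tool_dependencies keys are 'name/version' strings. A small sketch of that lookup with a hypothetical metadata dict:

metadata = {
    'tool_dependencies': {
        'samtools/0.1.18': { 'name': 'samtools', 'version': '0.1.18',
                             'type': 'package', 'readme': 'Build notes...' }
    }
}

def find_readme( metadata, dep_name, dep_version ):
    # Keys have the form '<name>/<version>', as split in the Mako code above.
    for key, requirements_dict in metadata[ 'tool_dependencies' ].items():
        key_name, key_version = key.split( '/' )[ :2 ]
        if key_name == dep_name and key_version == dep_version:
            return requirements_dict.get( 'readme', None )
    return None

find_readme( metadata, 'samtools', '0.1.18' )  # -> 'Build notes...'
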
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -12,10 +12,8 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
%endif
%if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div>
@@ -91,21 +89,15 @@
<td><b>version</b></td><td><b>type</b></td></tr>
- %for index, missing_dependency_tup in enumerate( missing_tool_dependencies ):
- <% name, version, type = missing_dependency_tup %>
+ %for tool_dependency in missing_tool_dependencies:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="missing_dependency-${index}-popup">
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
- ${name}
- </a>
- </div>
- <div popupmenu="missing_dependency-${index}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
- </div>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.id ) )}">
+ ${tool_dependency.name}
+ </a></td>
- <td>${version}</td>
- <td>${type}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td></tr>
%endfor
</table>
@@ -131,14 +123,9 @@
%for installed_tool_dependency in installed_tool_dependencies:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${installed_tool_dependency.id}-popup">
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${installed_tool_dependency.name}
- </a>
- </div>
- <div popupmenu="dependency-${installed_tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
- </div>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${installed_tool_dependency.name}
+ </a></td><td>${installed_tool_dependency.version}</td><td>${installed_tool_dependency.type}</td>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -13,9 +13,6 @@
%if repository.includes_tools:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
%endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
- %endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
@@ -34,25 +31,25 @@
name = tool_dependency.name
version = tool_dependency.version
type = tool_dependency.type
- uninstalled = tool_dependency.uninstalled
+ installed = tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED
install_dir = tool_dependency.installation_directory( trans.app )
%><tr><td bgcolor="#D8D8D8"><div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${tool_dependency.id}-popup">
- %if uninstalled:
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ %if not installed:
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='browse', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}"><b>Name</b></a><div popupmenu="dependency-${tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='install', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}">Install this tool dependency</a></div>
%else:
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='browse', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}"><b>Name</b></a><div popupmenu="dependency-${tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='uninstall', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}">Uninstall this tool dependency</a></div>
%endif
</div>
@@ -64,7 +61,7 @@
<tr><th>Install directory</th><td>
- %if uninstalled:
+ %if not installed:
This dependency is not currently installed
%else:
<a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
@@ -73,7 +70,7 @@
%endif
</td></tr>
- <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
+ <tr><th>Installed</th><td>${not installed}</td></tr>
%endfor
</table><div style="clear: both"></div>
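
The hunk above replaces the old boolean uninstalled flag with a comparison against the dependency's status. A sketch of that test; the status strings are illustrative stand-ins for the constants on trans.model.ToolDependency.installation_status:

class InstallationStatus( object ):
    # Illustrative values only.
    INSTALLED = 'Installed'
    INSTALLING = 'Installing'
    NEVER_INSTALLED = 'Never installed'

def is_installed( tool_dependency_status ):
    # The template offers the 'uninstall' operation when this is True and
    # the 'install' operation otherwise.
    return tool_dependency_status == InstallationStatus.INSTALLED
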
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -8,8 +8,9 @@
<div class="toolForm"><div class="toolFormTitle">Choose the tool panel section to contain the installed tools (optional)</div><div class="toolFormBody">
- %if repository.includes_tool_dependencies:
- <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), reinstalling=True )}" method="post" >
+ %if repository.tool_dependencies:
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='install', tool_dependency_ids=tool_dependency_ids )}" method="post" >
%else:
<form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
%endif
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -78,6 +78,7 @@
<div class="toolForm"><div class="toolFormTitle">Repository README file (may contain important installation or license information)</div><div class="toolFormBody">
+ <input type="hidden" name="readme_text" value="${readme_text}"/><div class="form-row"><pre>${readme_text}</pre></div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/tool_dependencies_grid.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependencies_grid.mako
@@ -0,0 +1,8 @@
+<%inherit file="/grid_base.mako"/>
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${dependency_status_updater()}
+ ${tool_dependency_installation_updater()}
+</%def>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
@@ -0,0 +1,13 @@
+<%def name="render_tool_dependency_status( tool_dependency )">
+ <%
+ if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLING:
+ bgcolor = trans.model.ToolDependency.states.INSTALLING
+ rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">' % ( bgcolor, trans.security.encode_id( tool_dependency.id ) )
+ rval += '%s</div>' % tool_dependency.status
+ else:
+ rval = tool_dependency.status
+ %>
+ ${rval}
+</%def>
+
+${render_tool_dependency_status( tool_dependency )}
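
In plain Python the def above roughly amounts to the following; the 'Installing' string and the encoded_id argument are stand-ins for the model constant and trans.security.encode_id:

def render_tool_dependency_status( status, encoded_id ):
    if status == 'Installing':
        # Wrap in a div the grid's status updater can poll and refresh.
        return ( '<div class="count-box state-color-Installing" '
                 'id="ToolDependencyStatus-%s">%s</div>' % ( encoded_id, status ) )
    # Any other state is shown as the bare status string.
    return status

render_tool_dependency_status( 'Installing', 'abc123' )
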
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
@@ -0,0 +1,52 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Uninstall tool dependencies</div>
+ <div class="toolFormBody">
+ <form name="uninstall_tool_dependenceies" id="uninstall_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependencies' )}" method="post" >
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Type</th>
+ <th>Install directory</th>
+ </tr>
+ %for tool_dependency in tool_dependencies:
+ <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/>
+ <%
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ tool_dependency.name,
+ tool_dependency.version,
+ tool_dependency.tool_shed_repository.owner,
+ tool_dependency.tool_shed_repository.name,
+ tool_dependency.tool_shed_repository.installed_changeset_revision )
+ %>
+ %if os.path.exists( install_dir ):
+ <tr>
+ <td>${tool_dependency.name}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %endif
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="uninstall_tool_dependencies_button" value="Uninstall"/>
+ <div class="toolParamHelp" style="clear: both;">
+ Click to uninstall the tool dependencies listed above.
+ </div>
+ </div>
+ </form>
+ </div>
+</div>
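
The %if os.path.exists( install_dir ) guard above means only dependencies actually present on disk are listed for uninstallation. The same filter as a one-function sketch:

import os

def removable_dependencies( tool_dependencies, install_dir_for ):
    # install_dir_for is any callable mapping a dependency to its install
    # path (see the os.path.join construction in the template above).
    return [ td for td in tool_dependencies
             if os.path.exists( install_dir_for( td ) ) ]
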
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -283,7 +283,7 @@
%>
%if tool_id_version_message:
- ${render_msg( tool_id_version_message, 'error' )}
+ ${render_msg( tool_id_version_message, 'warning' )}
%endif
<div class="toolForm" id="${tool.id}">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dannon: Remove Availability Zone placement from cloud_launch instance kickoff. This prevents failures when an AZ is temporarily unavailable by letting Amazon auto-place the instance.
by Bitbucket 21 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a0a4f15fe095/
changeset: a0a4f15fe095
user: dannon
date: 2012-06-21 20:09:17
summary: Remove Availability Zone placement from cloud_launch instance kickoff. This prevents failures when an AZ is temporarily unavailable by letting Amazon auto-place the instance.
affected #: 1 file
diff -r f6a710440c0500fb09e980300419b53b2cda6088 -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 lib/galaxy/web/controllers/cloudlaunch.py
--- a/lib/galaxy/web/controllers/cloudlaunch.py
+++ b/lib/galaxy/web/controllers/cloudlaunch.py
@@ -177,7 +177,6 @@
instance_type = user_provided_data['instance_type']
# Remove 'instance_type' key from the dict before creating user data
del user_provided_data['instance_type']
- placement = _find_placement(ec2_conn, instance_type)
ud = "\n".join(['%s: %s' % (key, value) for key, value in user_provided_data.iteritems() if key != 'kp_material'])
try:
rs = ec2_conn.run_instances(image_id=image_id,
@@ -186,8 +185,7 @@
security_groups=security_groups,
user_data=ud,
kernel_id=kernel_id,
- ramdisk_id=ramdisk_id,
- placement=placement)
+ ramdisk_id=ramdisk_id)
except EC2ResponseError, e:
log.error("Problem starting an instance: %s\n%s" % (e, e.body))
if rs:
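
A minimal boto sketch of the resulting call; region, AMI id, key pair and security group are placeholders, and credentials are assumed to come from the usual boto configuration. Omitting placement= leaves availability-zone selection to EC2:

import boto.ec2

ec2_conn = boto.ec2.connect_to_region( 'us-east-1' )
reservation = ec2_conn.run_instances( image_id='ami-00000000',
                                      key_name='my-keypair',
                                      security_groups=[ 'galaxy' ],
                                      user_data='cluster_name: example',
                                      instance_type='m1.large' )
# No placement= argument, so Amazon picks the availability zone.
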
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/53d43a29b41b/
changeset: 53d43a29b41b
user: fbacall
date: 2012-06-19 15:36:44
summary: Included workflow SVG representation in myExperiment export
affected #: 2 files
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 53d43a29b41b4f5327deb42800c0f64d4f129a4a lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -505,107 +505,8 @@
stored = self.get_stored_workflow( trans, id, check_ownership=True )
session = trans.sa_session
- workflow = stored.latest_workflow
- data = []
-
- canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
- text = svgfig.SVG("g")
- connectors = svgfig.SVG("g")
- boxes = svgfig.SVG("g")
- svgfig.Text.defaults["font-size"] = "10px"
-
- in_pos = {}
- out_pos = {}
- margin = 5
- line_px = 16 # how much spacing between input/outputs
- widths = {} # store px width for boxes of each step
- max_width, max_x, max_y = 0, 0, 0
-
- for step in workflow.steps:
- # Load from database representation
- module = module_factory.from_workflow_step( trans, step )
-
- # Pack attributes into plain dictionary
- step_dict = {
- 'id': step.order_index,
- 'data_inputs': module.get_data_inputs(),
- 'data_outputs': module.get_data_outputs(),
- 'position': step.position
- }
-
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = \
- dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict['input_connections'] = input_conn_dict
-
- data.append(step_dict)
-
- x, y = step.position['left'], step.position['top']
- count = 0
-
- max_len = len(module.get_name()) * 1.5
- text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
-
- y += 45
- for di in module.get_data_inputs():
- cur_y = y+count*line_px
- if step.order_index not in in_pos:
- in_pos[step.order_index] = {}
- in_pos[step.order_index][di['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
- count += 1
- max_len = max(max_len, len(di['label']))
-
-
- if len(module.get_data_inputs()) > 0:
- y += 15
-
- for do in module.get_data_outputs():
- cur_y = y+count*line_px
- if step.order_index not in out_pos:
- out_pos[step.order_index] = {}
- out_pos[step.order_index][do['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
- count += 1
- max_len = max(max_len, len(do['name']))
-
- widths[step.order_index] = max_len*5.5
- max_x = max(max_x, step.position['left'])
- max_y = max(max_y, step.position['top'])
- max_width = max(max_width, widths[step.order_index])
-
- for step_dict in data:
- width = widths[step_dict['id']]
- x, y = step_dict['position']['left'], step_dict['position']['top']
- boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
- box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
-
- # Draw separator line
- if len(step_dict['data_inputs']) > 0:
- box_height += 15
- sep_y = y + len(step_dict['data_inputs']) * line_px + 40
- text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
-
- # input/output box
- boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
-
- for conn, output_dict in step_dict['input_connections'].iteritems():
- in_coords = in_pos[step_dict['id']][conn]
- out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
- adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
- text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
- connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
-
- canvas.append(connectors)
- canvas.append(boxes)
- canvas.append(text)
- width, height = (max_x + max_width + 50), max_y + 300
- canvas['width'] = "%s px" % width
- canvas['height'] = "%s px" % height
- canvas['viewBox'] = "0 0 %s %s" % (width, height)
trans.response.set_content_type("image/svg+xml")
- return canvas.standalone_xml()
+ return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@web.expose
@@ -1056,7 +957,8 @@
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
workflow_name=workflow_dict['name'], \
workflow_description=workflow_dict['annotation'], \
- workflow_content=workflow_content
+ workflow_content=workflow_content, \
+ workflow_svg=self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicode( request_raw.strip(), 'utf-8' )
@@ -1929,6 +1831,110 @@
trans.sa_session.flush()
return stored, missing_tool_tups
+
+ def _workflow_to_svg_canvas( self, trans, stored ):
+
+ workflow = stored.latest_workflow
+ data = []
+
+ canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
+ text = svgfig.SVG("g")
+ connectors = svgfig.SVG("g")
+ boxes = svgfig.SVG("g")
+ svgfig.Text.defaults["font-size"] = "10px"
+
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ line_px = 16 # how much spacing between input/outputs
+ widths = {} # store px width for boxes of each step
+ max_width, max_x, max_y = 0, 0, 0
+
+ for step in workflow.steps:
+ # Load from database representation
+ module = module_factory.from_workflow_step( trans, step )
+
+ # Pack attributes into plain dictionary
+ step_dict = {
+ 'id': step.order_index,
+ 'data_inputs': module.get_data_inputs(),
+ 'data_outputs': module.get_data_outputs(),
+ 'position': step.position
+ }
+
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = \
+ dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict['input_connections'] = input_conn_dict
+
+ data.append(step_dict)
+
+ x, y = step.position['left'], step.position['top']
+ count = 0
+
+ max_len = len(module.get_name()) * 1.5
+ text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
+
+ y += 45
+ for di in module.get_data_inputs():
+ cur_y = y+count*line_px
+ if step.order_index not in in_pos:
+ in_pos[step.order_index] = {}
+ in_pos[step.order_index][di['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
+ count += 1
+ max_len = max(max_len, len(di['label']))
+
+
+ if len(module.get_data_inputs()) > 0:
+ y += 15
+
+ for do in module.get_data_outputs():
+ cur_y = y+count*line_px
+ if step.order_index not in out_pos:
+ out_pos[step.order_index] = {}
+ out_pos[step.order_index][do['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
+ count += 1
+ max_len = max(max_len, len(do['name']))
+
+ widths[step.order_index] = max_len*5.5
+ max_x = max(max_x, step.position['left'])
+ max_y = max(max_y, step.position['top'])
+ max_width = max(max_width, widths[step.order_index])
+
+ for step_dict in data:
+ width = widths[step_dict['id']]
+ x, y = step_dict['position']['left'], step_dict['position']['top']
+ boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
+ box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
+
+ # Draw separator line
+ if len(step_dict['data_inputs']) > 0:
+ box_height += 15
+ sep_y = y + len(step_dict['data_inputs']) * line_px + 40
+ text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
+
+ # input/output box
+ boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
+
+ for conn, output_dict in step_dict['input_connections'].iteritems():
+ in_coords = in_pos[step_dict['id']][conn]
+ out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
+ adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
+ text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
+ connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
+
+ canvas.append(connectors)
+ canvas.append(boxes)
+ canvas.append(text)
+ width, height = (max_x + max_width + 50), max_y + 300
+ canvas['width'] = "%s px" % width
+ canvas['height'] = "%s px" % height
+ canvas['viewBox'] = "0 0 %s %s" % (width, height)
+
+ return canvas
## ---- Utility methods -------------------------------------------------------
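
The extracted helper leans entirely on the svgfig primitives already visible above. A tiny self-contained sketch of that API, drawing one labelled box and producing standalone SVG text:

import svgfig

canvas = svgfig.canvas( style="stroke:black; fill:none; stroke-width:1px" )
boxes = svgfig.SVG( "g" )
text = svgfig.SVG( "g" )
boxes.append( svgfig.Rect( 0, 0, 120, 30, fill="#EBD9B2" ).SVG() )
text.append( svgfig.Text( 5, 20, "Input dataset", **{ "font-size": "14px" } ).SVG() )
canvas.append( boxes )
canvas.append( text )
canvas[ 'width' ], canvas[ 'height' ] = "200 px", "100 px"
svg_xml = canvas.standalone_xml()
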
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 53d43a29b41b4f5327deb42800c0f64d4f129a4a templates/workflow/myexp_export.mako
--- a/templates/workflow/myexp_export.mako
+++ b/templates/workflow/myexp_export.mako
@@ -15,6 +15,7 @@
<content encoding="base64" type="binary">
${textwrap.fill( base64.b64encode( workflow_content ), 64 )}
</content>
- <preview encoding="base64" type="binary">
- </preview>
-</workflow>
\ No newline at end of file
+ <svg encoding="base64">
+ ${textwrap.fill( base64.b64encode( workflow_svg ), 64 )}
+ </svg>
+</workflow>
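
The new <svg> element uses the same encoding as the workflow content: base64, re-wrapped to 64-column lines so the myExperiment XML parser accepts it. A sketch in the Python 2 idiom of the codebase, with a placeholder SVG string:

import base64
import textwrap

workflow_svg = '<svg xmlns="http://www.w3.org/2000/svg"></svg>'
encoded = textwrap.fill( base64.b64encode( workflow_svg ), 64 )
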
https://bitbucket.org/galaxy/galaxy-central/changeset/f6a710440c05/
changeset: f6a710440c05
user: jgoecks
date: 2012-06-20 20:59:37
summary: Merged in fbacall/galaxy-central-myexp-integration (pull request #49)
affected #: 2 files
diff -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 -r f6a710440c0500fb09e980300419b53b2cda6088 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -505,107 +505,8 @@
stored = self.get_stored_workflow( trans, id, check_ownership=True )
session = trans.sa_session
- workflow = stored.latest_workflow
- data = []
-
- canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
- text = svgfig.SVG("g")
- connectors = svgfig.SVG("g")
- boxes = svgfig.SVG("g")
- svgfig.Text.defaults["font-size"] = "10px"
-
- in_pos = {}
- out_pos = {}
- margin = 5
- line_px = 16 # how much spacing between input/outputs
- widths = {} # store px width for boxes of each step
- max_width, max_x, max_y = 0, 0, 0
-
- for step in workflow.steps:
- # Load from database representation
- module = module_factory.from_workflow_step( trans, step )
-
- # Pack attributes into plain dictionary
- step_dict = {
- 'id': step.order_index,
- 'data_inputs': module.get_data_inputs(),
- 'data_outputs': module.get_data_outputs(),
- 'position': step.position
- }
-
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = \
- dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict['input_connections'] = input_conn_dict
-
- data.append(step_dict)
-
- x, y = step.position['left'], step.position['top']
- count = 0
-
- max_len = len(module.get_name()) * 1.5
- text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
-
- y += 45
- for di in module.get_data_inputs():
- cur_y = y+count*line_px
- if step.order_index not in in_pos:
- in_pos[step.order_index] = {}
- in_pos[step.order_index][di['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
- count += 1
- max_len = max(max_len, len(di['label']))
-
-
- if len(module.get_data_inputs()) > 0:
- y += 15
-
- for do in module.get_data_outputs():
- cur_y = y+count*line_px
- if step.order_index not in out_pos:
- out_pos[step.order_index] = {}
- out_pos[step.order_index][do['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
- count += 1
- max_len = max(max_len, len(do['name']))
-
- widths[step.order_index] = max_len*5.5
- max_x = max(max_x, step.position['left'])
- max_y = max(max_y, step.position['top'])
- max_width = max(max_width, widths[step.order_index])
-
- for step_dict in data:
- width = widths[step_dict['id']]
- x, y = step_dict['position']['left'], step_dict['position']['top']
- boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
- box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
-
- # Draw separator line
- if len(step_dict['data_inputs']) > 0:
- box_height += 15
- sep_y = y + len(step_dict['data_inputs']) * line_px + 40
- text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
-
- # input/output box
- boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
-
- for conn, output_dict in step_dict['input_connections'].iteritems():
- in_coords = in_pos[step_dict['id']][conn]
- out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
- adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
- text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
- connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
-
- canvas.append(connectors)
- canvas.append(boxes)
- canvas.append(text)
- width, height = (max_x + max_width + 50), max_y + 300
- canvas['width'] = "%s px" % width
- canvas['height'] = "%s px" % height
- canvas['viewBox'] = "0 0 %s %s" % (width, height)
trans.response.set_content_type("image/svg+xml")
- return canvas.standalone_xml()
+ return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@web.expose
@@ -1056,7 +957,8 @@
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
workflow_name=workflow_dict['name'], \
workflow_description=workflow_dict['annotation'], \
- workflow_content=workflow_content
+ workflow_content=workflow_content, \
+ workflow_svg=self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicode( request_raw.strip(), 'utf-8' )
@@ -1929,6 +1831,110 @@
trans.sa_session.flush()
return stored, missing_tool_tups
+
+ def _workflow_to_svg_canvas( self, trans, stored ):
+
+ workflow = stored.latest_workflow
+ data = []
+
+ canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
+ text = svgfig.SVG("g")
+ connectors = svgfig.SVG("g")
+ boxes = svgfig.SVG("g")
+ svgfig.Text.defaults["font-size"] = "10px"
+
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ line_px = 16 # how much spacing between input/outputs
+ widths = {} # store px width for boxes of each step
+ max_width, max_x, max_y = 0, 0, 0
+
+ for step in workflow.steps:
+ # Load from database representation
+ module = module_factory.from_workflow_step( trans, step )
+
+ # Pack attributes into plain dictionary
+ step_dict = {
+ 'id': step.order_index,
+ 'data_inputs': module.get_data_inputs(),
+ 'data_outputs': module.get_data_outputs(),
+ 'position': step.position
+ }
+
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = \
+ dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict['input_connections'] = input_conn_dict
+
+ data.append(step_dict)
+
+ x, y = step.position['left'], step.position['top']
+ count = 0
+
+ max_len = len(module.get_name()) * 1.5
+ text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
+
+ y += 45
+ for di in module.get_data_inputs():
+ cur_y = y+count*line_px
+ if step.order_index not in in_pos:
+ in_pos[step.order_index] = {}
+ in_pos[step.order_index][di['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
+ count += 1
+ max_len = max(max_len, len(di['label']))
+
+
+ if len(module.get_data_inputs()) > 0:
+ y += 15
+
+ for do in module.get_data_outputs():
+ cur_y = y+count*line_px
+ if step.order_index not in out_pos:
+ out_pos[step.order_index] = {}
+ out_pos[step.order_index][do['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
+ count += 1
+ max_len = max(max_len, len(do['name']))
+
+ widths[step.order_index] = max_len*5.5
+ max_x = max(max_x, step.position['left'])
+ max_y = max(max_y, step.position['top'])
+ max_width = max(max_width, widths[step.order_index])
+
+ for step_dict in data:
+ width = widths[step_dict['id']]
+ x, y = step_dict['position']['left'], step_dict['position']['top']
+ boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
+ box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
+
+ # Draw separator line
+ if len(step_dict['data_inputs']) > 0:
+ box_height += 15
+ sep_y = y + len(step_dict['data_inputs']) * line_px + 40
+ text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
+
+ # input/output box
+ boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
+
+ for conn, output_dict in step_dict['input_connections'].iteritems():
+ in_coords = in_pos[step_dict['id']][conn]
+ out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
+ adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
+ text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
+ connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
+
+ canvas.append(connectors)
+ canvas.append(boxes)
+ canvas.append(text)
+ width, height = (max_x + max_width + 50), max_y + 300
+ canvas['width'] = "%s px" % width
+ canvas['height'] = "%s px" % height
+ canvas['viewBox'] = "0 0 %s %s" % (width, height)
+
+ return canvas
## ---- Utility methods -------------------------------------------------------
diff -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 -r f6a710440c0500fb09e980300419b53b2cda6088 templates/workflow/myexp_export.mako
--- a/templates/workflow/myexp_export.mako
+++ b/templates/workflow/myexp_export.mako
@@ -15,6 +15,7 @@
<content encoding="base64" type="binary">
${textwrap.fill( base64.b64encode( workflow_content ), 64 )}
</content>
- <preview encoding="base64" type="binary">
- </preview>
-</workflow>
\ No newline at end of file
+ <svg encoding="base64">
+ ${textwrap.fill( base64.b64encode( workflow_svg ), 64 )}
+ </svg>
+</workflow>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: Scott McManus: PBS runner now writes and reads an exit code for each command
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6fe91b7bfe6e/
changeset: 6fe91b7bfe6e
user: Scott McManus
date: 2012-06-20 19:02:16
summary: PBS runner now writes and reads an exit code for each command
affected #: 1 file
diff -r 663e03e40c86ffa0766dd6638ea7c0886632117d -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -33,6 +33,8 @@
__all__ = [ 'PBSJobRunner' ]
+# The last two lines execute the command and then retrieve the command's
+# exit code ($?) and write it to a file.
pbs_template = """#!/bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
@@ -45,8 +47,11 @@
%s
cd %s
%s
+echo $? > %s
"""
+# The last two lines execute the command and then retrieve the command's
+# exit code ($?) and write it to a file.
pbs_symlink_template = """#!/bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
@@ -65,6 +70,7 @@
%s
cd %s
%s
+echo $? > %s
"""
# From pbs' job.h
@@ -93,6 +99,7 @@
self.job_file = None
self.ofile = None
self.efile = None
+ self.ecfile = None
self.runner_url = None
self.check_count = 0
self.stop_job = False
@@ -233,6 +240,7 @@
# define job attributes
ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+ ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
output_fnames = job_wrapper.get_output_fnames()
@@ -273,12 +281,15 @@
self.app.config.pbs_stage_path,
job_wrapper.get_env_setup_clause(),
exec_dir,
- command_line )
+ command_line,
+ ecfile )
+
else:
script = pbs_template % ( job_wrapper.galaxy_lib_dir,
job_wrapper.get_env_setup_clause(),
exec_dir,
- command_line )
+ command_line,
+ ecfile )
job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
fh = file(job_file, "w")
fh.write(script)
@@ -289,7 +300,7 @@
log.debug( "Job %s deleted by user before it entered the PBS queue" % job_wrapper.job_id )
pbs.pbs_disconnect(c)
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
+ self.cleanup( ( ofile, efile, ecfile, job_file ) )
job_wrapper.cleanup()
return
@@ -321,6 +332,7 @@
pbs_job_state.job_id = job_id
pbs_job_state.ofile = ofile
pbs_job_state.efile = efile
+ pbs_job_state.ecfile = ecfile
pbs_job_state.job_file = job_file
pbs_job_state.old_state = 'N'
pbs_job_state.running = False
@@ -510,27 +522,35 @@
"""
ofile = pbs_job_state.ofile
efile = pbs_job_state.efile
+ ecfile = pbs_job_state.ecfile
job_file = pbs_job_state.job_file
# collect the output
try:
ofh = file(ofile, "r")
efh = file(efile, "r")
+ ecfh = file(ecfile, "r")
stdout = ofh.read( 32768 )
stderr = efh.read( 32768 )
+ # This should be an 8-bit exit code, but read ahead anyway:
+ exit_code = ecfh.read(32)
except:
stdout = ''
stderr = 'Job output not returned by PBS: the output datasets were deleted while the job was running, the job was manually dequeued or there was a cluster error.'
+ # By default, the exit code is 0, which usually indicates success
+ # (although clearly some error happened).
+ exit_code = 0
log.debug(stderr)
+ log.debug( "Job exit code: " + exit_code )
try:
- pbs_job_state.job_wrapper.finish( stdout, stderr )
+ pbs_job_state.job_wrapper.finish( stdout, stderr, exit_code )
except:
log.exception("Job wrapper finish method failed")
pbs_job_state.job_wrapper.fail("Unable to finish job", exception=True)
# clean up the pbs files
if self.app.config.cleanup_job == "always" or ( not stderr and self.app.config.cleanup_job == "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
+ self.cleanup( ( ofile, efile, ecfile, job_file ) )
def fail_job( self, pbs_job_state ):
"""
@@ -594,6 +614,7 @@
pbs_job_state = PBSJobState()
pbs_job_state.ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job.id)
+ pbs_job_state.ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.job_id = str( job.job_runner_external_id )
pbs_job_state.runner_url = job_wrapper.get_job_runner()
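
The mechanism added above is a shell-level handoff: the generated job script echoes $? into an .ec file beside the .o/.e files, and the code that collects job output later reads it back. A sketch with illustrative paths and command:

# Stand-in values; the real ones come from the job wrapper and
# cluster_files_directory.
script = "#!/bin/sh\ncd %s\n%s\necho $? > %s\n" % (
    '/galaxy/job_working_dir/42',
    'samtools view input.bam > out.sam',
    '/galaxy/pbs/42.ec' )

# After the job completes, the exit code is read back. It should be an
# 8-bit value, but reading a few extra bytes is harmless:
# exit_code = open( '/galaxy/pbs/42.ec' ).read( 32 )
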
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dannon: Remove debugging exception from 06249bb8e6c9.
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/663e03e40c86/
changeset: 663e03e40c86
user: dannon
date: 2012-06-20 09:39:05
summary: Remove debugging exception from 06249bb8e6c9.
affected #: 1 file
diff -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 -r 663e03e40c86ffa0766dd6638ea7c0886632117d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -284,7 +284,6 @@
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
- raise Exception
""" Old display method, for transition """
#Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: Scott McManus: Adding in exit code management for the local runner; I'm holding off on the PBS and generic DRMAA runners until they're tested.
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/06249bb8e6c9/
changeset: 06249bb8e6c9
user: Scott McManus
date: 2012-06-20 08:14:55
summary: Adding in exit code management for the local runner; I'm holding off on the PBS and generic DRMAA runners until they're tested.
affected #: 4 files
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -284,6 +284,7 @@
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
+ raise Exception
""" Old display method, for transition """
#Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -285,12 +285,14 @@
self.sa_session.add( job )
self.sa_session.flush()
- def finish( self, stdout, stderr ):
+ def finish( self, stdout, stderr, tool_exit_code=0 ):
"""
Called to indicate that the associated command has been run. Updates
the output datasets based on stderr and stdout from the command, and
the contents of the output files.
"""
+ # TODO: Eliminate debugging code after testing all runners
+ log.debug( "JobWrapper.finish: exit code:" + str(tool_exit_code) )
# default post job setup
self.sa_session.expunge_all()
job = self.get_job()
@@ -317,17 +319,15 @@
# that range, then apply the error level and add in a message.
# If we've reached a fatal error rule, then stop.
max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
- for exit_code in self.tool.stdio_exit_codes:
- # TODO: Fetch the exit code from the .rc file:
- tool_exit_code = 0
- if ( tool_exit_code >= exit_code.range_start and
- tool_exit_code <= exit_code.range_end ):
- if None != exit_code.desc:
- err_msg += exit_code.desc
+ for stdio_exit_code in self.tool.stdio_exit_codes:
+ if ( tool_exit_code >= stdio_exit_code.range_start and
+ tool_exit_code <= stdio_exit_code.range_end ):
+ if None != stdio_exit_code.desc:
+ err_msg += stdio_exit_code.desc
# TODO: Find somewhere to stick the err_msg - possibly to
# the source (stderr/stdout), possibly in a new db column.
max_error_level = max( max_error_level,
- exit_code.error_level )
+ stdio_exit_code.error_level )
if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
break
# If there is a regular expression for scanning stdout/stderr,
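
A compact sketch of the exit-code matching this hunk introduces: each rule carries an inclusive range and an error level, and scanning stops at the first fatal match. The class and level values are simplified stand-ins for the parsed stdio_exit_codes objects and galaxy.tools.StdioErrorLevel:

class StdioExitCode( object ):
    def __init__( self, range_start, range_end, error_level, desc=None ):
        self.range_start, self.range_end = range_start, range_end
        self.error_level, self.desc = error_level, desc

NO_ERROR, WARNING, FATAL = 0, 1, 3  # illustrative level ordering

def max_matched_error_level( tool_exit_code, stdio_exit_codes ):
    max_error_level = NO_ERROR
    for rule in stdio_exit_codes:
        if rule.range_start <= tool_exit_code <= rule.range_end:
            max_error_level = max( max_error_level, rule.error_level )
            if max_error_level >= FATAL:
                break  # a fatal rule ends the scan
    return max_error_level

max_matched_error_level( 1, [ StdioExitCode( 1, 255, FATAL ) ] )  # -> 3
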
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -368,6 +368,7 @@
start_job_runners.append("tasks")
for name in start_job_runners:
self._load_plugin( name )
+ log.debug( "Job runners: " + ':'.join( start_job_runners ) )
def _load_plugin( self, name ):
module_name = 'galaxy.jobs.runners.' + name
@@ -397,6 +398,7 @@
def put( self, job_wrapper ):
try:
runner_name = self.__get_runner_name( job_wrapper )
+ log.debug( "Runner_name: " + runner_name )
if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and isinstance(job_wrapper, TaskWrapper):
#DBTODO Refactor
log.debug( "dispatching task %s, of job %d, to %s runner" %( job_wrapper.task_id, job_wrapper.job_id, runner_name ) )
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -54,6 +54,7 @@
def run_job( self, job_wrapper ):
job_wrapper.set_runner( 'local:///', None )
stderr = stdout = command_line = ''
+ exit_code = 0
# Prepare the job to run
try:
job_wrapper.prepare()
@@ -99,7 +100,11 @@
if sleep_time < 8:
# So we don't stat every second
sleep_time *= 2
- proc.wait() # reap
+ # Reap the process and get the exit code. The exit code should
+ # only be None if the process isn't finished, but check anyway.
+ exit_code = proc.wait() # reap
+ if None == exit_code:
+ exit_code = 0
stdout_file.seek( 0 )
stderr_file.seek( 0 )
stdout = stdout_file.read( 32768 )
@@ -128,9 +133,9 @@
external_metadata_proc.wait()
log.debug( 'execution of external set_meta for job %d finished' % job_wrapper.job_id )
- # Finish the job
+ # Finish the job!
try:
- job_wrapper.finish( stdout, stderr )
+ job_wrapper.finish( stdout, stderr, exit_code )
except:
log.exception("Job wrapper finish method failed")
job_wrapper.fail("Unable to finish job", exception=True)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a8d96b0464c8/
changeset: a8d96b0464c8
user: fubar
date: 2012-06-20 02:45:10
summary: Removed hard-coded sizes of 40 characters from templates/form.mako to allow parameter size= settings to work correctly
affected #: 1 file
diff -r 8d6138fa2251566b5d17d64e9f56f129d9dc624a -r a8d96b0464c85bd7996e4c9764f769c5be31d682 templates/form.mako
--- a/templates/form.mako
+++ b/templates/form.mako
@@ -81,7 +81,7 @@
%endif
<div class="form-row-input">
%if input.type == 'textarea':
- <textarea name="${input.name}" cols="40">${input.value}</textarea>
+ <textarea name="${input.name}">${input.value}</textarea>
%elif input.type == 'select':
<select name="${input.name}">
%for (name, value) in input.options:
@@ -89,7 +89,7 @@
%endfor
</select>
%else:
- <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+ <input type="${input.type}" name="${input.name}" value="${input.value}">
%endif
</div>
%if input.error:
@@ -107,4 +107,4 @@
</form></div></div>
-</%def>
\ No newline at end of file
+</%def>
https://bitbucket.org/galaxy/galaxy-central/changeset/a1276b504934/
changeset: a1276b504934
user: fubar
date: 2012-06-20 02:58:54
summary: Also needed a base.css change to fix the restriction on tool form parameter widths
It will be interesting to see if anyone notices this....
affected #: 1 file
diff -r a8d96b0464c85bd7996e4c9764f769c5be31d682 -r a1276b5049345152c02fc1cffff341de4904f164 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -569,14 +569,14 @@
div.form-row-error{background:#ffcccc;}
div.form-row label{font-weight:bold;display:block;margin-bottom:.2em;}
div.form-row label.inline{display:inline;}
-div.form-row-input{width:300px;float:left;}
+div.form-row-input{width:90%;float:left;}
div.form-row-input label{font-weight:normal;display:inline;}
div.form-row-error-message{width:300px;float:left;color:red;font-weight:bold;padding:3px 0;}
.form-row .help,.toolParamHelp{color:#666;}.form-row .help a,.toolParamHelp a{color:#666;}
.form-row.form-actions{background:whiteSmoke;border-top:solid #ddd 1px;padding-top:10px;padding-bottom:10px;margin-top:5px;}
select{padding:2px;font-size:12px;line-height:16px;}
select,input,textarea{font:inherit;}
-select,textarea,input[type="text"],input[type="file"],input[type="password"]{max-width:300px;}
+select,textarea,input[type="text"],input[type="file"],input[type="password"]{max-width:90%;}
textarea,input[type="text"],input[type="password"]{font-size:12px;line-height:16px;border:1px solid #999999;padding:3px;}
.search-query{display:inline-block;padding:4px;font-size:12px;line-height:16px;color:#555555;border:1px solid #999999;padding-left:14px !important;padding-right:14px !important;margin-bottom:0;-webkit-border-radius:14px;-moz-border-radius:14px;border-radius:14px;max-width:auto;}
.search-query:focus{border-color:rgba(24, 132, 218, 0.8);-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);outline:0;outline:thin dotted \9;}
@@ -724,7 +724,7 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
-#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;display:none;background:url(largespinner.gif);}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
#content_table td.stringalign{text-align:left;}
.toolMenuAndView .toolForm{float:left;background-color:white;margin:10px;}
https://bitbucket.org/galaxy/galaxy-central/changeset/f1c5d6639f2e/
changeset: f1c5d6639f2e
user: fubar
date: 2012-06-20 03:02:58
summary: Can't be too careful - reverted base.css loading_indicator to tip version
affected #: 1 file
diff -r a1276b5049345152c02fc1cffff341de4904f164 -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -724,7 +724,7 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
-#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;display:none;background:url(largespinner.gif);}
#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
#content_table td.stringalign{text-align:left;}
.toolMenuAndView .toolForm{float:left;background-color:white;margin:10px;}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8d6138fa2251/
changeset: 8d6138fa2251
user: dannon
date: 2012-06-19 22:42:25
summary: S3 object store now functional.
Whitespace cleanup.
affected #: 1 file
diff -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 -r 8d6138fa2251566b5d17d64e9f56f129d9dc624a lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -40,20 +40,20 @@
def __init__(self):
self.running = True
self.extra_dirs = {}
-
+
def shutdown(self):
self.running = False
-
+
def exists(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Returns True if the object identified by `obj` exists in this file
store, False otherwise.
-
+
FIELD DESCRIPTIONS (these apply to all the methods in this class):
:type obj: object
:param obj: A Galaxy object with an assigned database ID accessible via
the .id attribute.
-
+
:type base_dir: string
:param base_dir: A key in self.extra_dirs corresponding to the base
directory in which this object should be created, or
@@ -64,19 +64,19 @@
identified by `obj` should be located, not the dataset
itself. This option applies to `extra_dir` argument as
well.
-
+
:type extra_dir: string
:param extra_dir: Append `extra_dir` to the directory structure where
the dataset identified by `obj` should be located.
(e.g., 000/extra_dir/obj.id)
-
+
:type extra_dir_at_root: bool
:param extra_dir_at_root: Applicable only if `extra_dir` is set.
If True, the `extra_dir` argument is placed at
root of the created directory structure rather
than at the end (e.g., extra_dir/000/obj.id
vs. 000/extra_dir/obj.id)
-
+
:type alt_name: string
:param alt_name: Use this name as the alternative name for the created
dataset rather than the default.
@@ -84,10 +84,10 @@
raise NotImplementedError()
def file_ready(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
- """ A helper method that checks if a file corresponding to a dataset
+ """ A helper method that checks if a file corresponding to a dataset
is ready and available to be used. Return True if so, False otherwise."""
return True
-
+
def create(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Mark the object identified by `obj` as existing in the store, but with
@@ -104,7 +104,7 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def size(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Return size of the object identified by `obj`.
@@ -112,13 +112,13 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def delete(self, obj, entire_dir=False, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Deletes the object identified by `obj`.
See `exists` method for the description of other fields.
:type entire_dir: bool
- :param entire_dir: If True, delete the entire directory pointed to by
+ :param entire_dir: If True, delete the entire directory pointed to by
extra_dir. For safety reasons, this option applies
only for and in conjunction with the extra_dir option.
"""
@@ -130,15 +130,15 @@
object identified uniquely by `obj`.
If the object does not exist raises `ObjectNotFound`.
See `exists` method for the description of other fields.
-
+
:type start: int
:param start: Set the position to start reading the dataset file
-
+
:type count: int
:param count: Read at most `count` bytes from the dataset
"""
raise NotImplementedError()
-
+
def get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Get the expected filename (including the absolute path) which can be used
@@ -146,7 +146,7 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def update_from_file(self, obj, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, file_name=None, create=False):
"""
Inform the store that the file associated with the object has been
@@ -154,16 +154,16 @@
of the default.
If the object does not exist raises `ObjectNotFound`.
See `exists` method for the description of other fields.
-
+
:type file_name: string
- :param file_name: Use file pointed to by `file_name` as the source for
+ :param file_name: Use file pointed to by `file_name` as the source for
updating the dataset identified by `obj`
-
+
:type create: bool
:param create: If True and the default dataset does not exist, create it first.
"""
raise NotImplementedError()
-
+
def get_object_url(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
If the store supports direct URL access, return a URL. Otherwise return
@@ -178,7 +178,7 @@
Return the percentage indicating how full the store is
"""
raise NotImplementedError()
-
+
## def get_staging_command( id ):
## """
## Return a shell command that can be prepended to the job script to stage the
@@ -212,21 +212,21 @@
self.extra_dirs['temp'] = config.new_file_path
if extra_dirs is not None:
self.extra_dirs.update( extra_dirs )
-
+
def _get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""Class method that returns the absolute path for the file corresponding
- to the `obj`.id regardless of whether the file exists.
+ to the `obj`.id regardless of whether the file exists.
"""
path = self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name, old_style=True)
- # For backward compatibility, check the old style root path first; otherwise,
+ # For backward compatibility, check the old style root path first; otherwise,
# construct hashed path
if not os.path.exists(path):
return self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
-
+
def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
""" Construct the expected absolute path for accessing the object
identified by `obj`.id.
-
+
:type base_dir: string
:param base_dir: A key in self.extra_dirs corresponding to the base
directory in which this object should be created, or
@@ -237,16 +237,16 @@
identified by `obj` should be located, not the
dataset itself. This option applies to `extra_dir`
argument as well.
-
+
:type extra_dir: string
:param extra_dir: Append the value of this parameter to the expected path
used to access the object identified by `obj`
(e.g., /files/000/<extra_dir>/dataset_10.dat).
-
+
:type alt_name: string
:param alt_name: Use this name as the alternative name for the returned
dataset rather than the default.
-
+
:type old_style: bool
param old_style: This option is used for backward compatibility. If True
the composed directory structure does not include a hash id
@@ -274,7 +274,7 @@
def exists(self, obj, **kwargs):
path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
+ # For backward compatibility, check root path first; otherwise, construct
# and check hashed path
if os.path.exists(path):
return True
@@ -292,12 +292,12 @@
os.makedirs(dir)
# Create the file if it does not exist
if not dir_only:
- open(path, 'w').close()
+ open(path, 'w').close()
util.umask_fix_perms(path, self.config.umask, 0666)
def empty(self, obj, **kwargs):
return os.path.getsize(self.get_filename(obj, **kwargs)) > 0
-
+
def size(self, obj, **kwargs):
if self.exists(obj, **kwargs):
try:
@@ -306,7 +306,7 @@
return 0
else:
return 0
-
+
def delete(self, obj, entire_dir=False, **kwargs):
path = self.get_filename(obj, **kwargs)
extra_dir = kwargs.get('extra_dir', None)
@@ -327,16 +327,16 @@
content = data_file.read(count)
data_file.close()
return content
-
+
def get_filename(self, obj, **kwargs):
path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
+ # For backward compatibility, check root path first; otherwise, construct
# and return hashed path
if os.path.exists(path):
return path
else:
return self._construct_path(obj, **kwargs)
-
+
def update_from_file(self, obj, file_name=None, create=False, **kwargs):
""" `create` parameter is not used in this implementation """
if create:
@@ -345,12 +345,12 @@
try:
shutil.copy(file_name, self.get_filename(obj, **kwargs))
except IOError, ex:
- log.critical('Error copying %s to %s: %s' % (file_name,
+ log.critical('Error copying %s to %s: %s' % (file_name,
self._get_filename(obj, **kwargs), ex))
-
+
def get_object_url(self, obj, **kwargs):
return None
-
+
def get_store_usage_percent(self):
st = os.statvfs(self.file_path)
return (float(st.f_blocks - st.f_bavail)/st.f_blocks) * 100
@@ -361,10 +361,9 @@
Object store that uses a directory for caching files, but defers and writes
back to another object store.
"""
-
+
def __init__(self, path, backend):
super(CachingObjectStore, self).__init__(self, path, backend)
-
class S3ObjectStore(ObjectStore):
@@ -381,16 +380,24 @@
self.s3_conn = S3Connection()
self.bucket = self._get_bucket(self.config.s3_bucket)
self.use_rr = self.config.use_reduced_redundancy
- self.cache_size = self.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
+ self.cache_size = self.config.object_store_cache_size
self.transfer_progress = 0
# Clean cache only if value is set in universe_wsgi.ini
if self.cache_size != -1:
+ # Convert GBs to bytes for comparison
+ self.cache_size = self.cache_size * 1073741824
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
self.cache_monitor_thread.start()
log.info("Cache cleaner manager started")
-
+ # Test if 'axel' is available for parallel download and pull the key into cache
+ try:
+ subprocess.call('axel')
+ self.use_axel = True
+ except OSError:
+ self.use_axel = False
+
def __cache_monitor(self):
time.sleep(2) # Wait for things to load before starting the monitor
while self.running:
@@ -421,19 +428,19 @@
delete_this_much = total_size - cache_limit
self.__clean_cache(file_list, delete_this_much)
self.sleeper.sleep(30) # Test cache size every 30 seconds?
-
+
def __clean_cache(self, file_list, delete_this_much):
""" Keep deleting files from the file_list until the size of the deleted
files is greater than the value in delete_this_much parameter.
-
+
:type file_list: list
:param file_list: List of candidate files that can be deleted. This method
will start deleting files from the beginning of the list so the list
should be sorted accordingly. The list must contain 3-element tuples,
positioned as follows: position 0 holds file last accessed timestamp
- (as time.struct_time), position 1 holds file path, and position 2 has
+ (as time.struct_time), position 1 holds file path, and position 2 has
file size (e.g., (<access time>, /mnt/data/dataset_1.dat), 472394)
-
+
:type delete_this_much: int
:param delete_this_much: Total size of files, in bytes, that should be deleted.
"""
@@ -454,7 +461,7 @@
else:
log.debug("Cache cleaning done. Total space freed: %s" % convert_bytes(deleted_amount))
return
-
+
def _get_bucket(self, bucket_name):
""" Sometimes a handle to a bucket is not established right away so try
it a few times. Raise an error if a connection is not established. """
@@ -463,13 +470,13 @@
bucket = self.s3_conn.get_bucket(bucket_name)
log.debug("Using S3 object store; got bucket '%s'" % bucket.name)
return bucket
- except S3ResponseError:
+ except S3ResponseError:
log.debug("Could not get bucket '%s', attempt %s/5" % (bucket_name, i+1))
time.sleep(2)
# All the attempts have been exhausted and connection was not established,
# raise error
raise S3ResponseError
-
+
def _fix_permissions(self, rel_path):
""" Set permissions on rel_path"""
for basedir, dirs, files in os.walk(rel_path):
@@ -478,10 +485,10 @@
path = os.path.join(basedir, f)
# Ignore symlinks
if os.path.islink(path):
- continue
+ continue
util.umask_fix_perms( path, self.config.umask, 0666, self.config.gid )
-
- def _construct_path(self, obj, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+
+ def _construct_path(self, obj, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
rel_path = os.path.join(*directory_hash_id(obj.id))
if extra_dir is not None:
if extra_dir_at_root:
@@ -496,10 +503,10 @@
def _get_cache_path(self, rel_path):
return os.path.abspath(os.path.join(self.staging_path, rel_path))
-
+
def _get_transfer_progress(self):
return self.transfer_progress
-
+
def _get_size_in_s3(self, rel_path):
try:
key = self.bucket.get_key(rel_path)
@@ -510,7 +517,7 @@
except Exception, ex:
log.error("Could not get reference to the key object '%s'; returning -1 for key size: %s" % (rel_path, ex))
return -1
-
+
def _key_exists(self, rel_path):
exists = False
try:
@@ -532,15 +539,13 @@
if rel_path[0] == '/':
raise
return exists
-
+
def _in_cache(self, rel_path):
""" Check if the given dataset is in the local cache and return True if so. """
# log.debug("------ Checking cache for rel_path %s" % rel_path)
cache_path = self._get_cache_path(rel_path)
- exists = os.path.exists(cache_path)
- # print "Checking chache for %s; returning %s" % (cache_path, exists)
- return exists
- # EATODO: Part of checking if a file is in cache should be to ensure the
+ return os.path.exists(cache_path)
+ # TODO: Part of checking if a file is in cache should be to ensure the
# size of the cached file matches that on S3. Once the upload tool explicitly
# creates, this check should be implemented; in the meantime, it's not
# looking likely to be implementable reliably.
@@ -562,11 +567,7 @@
# # print "***3 %s found in cache but not in S3 (in_cache=True)" % cache_path
# exists = True
# else:
- # # print "***4 %s does not exist (in_cache=False)" % cache_path
- # exists = False
- # # print "Checking cache for %s; returning %s" % (cache_path, exists)
- # return exists
- # # return False
+ # return False
def _pull_into_cache(self, rel_path):
# Ensure the cache directory structure exists (e.g., dataset_#_files/)
@@ -577,45 +578,39 @@
ok = self._download(rel_path)
self._fix_permissions(self._get_cache_path(rel_path_dir))
return ok
-
+
def _transfer_cb(self, complete, total):
self.transfer_progress += 10
- # print "Dataset transfer progress: %s" % self.transfer_progress
-
+
def _download(self, rel_path):
try:
log.debug("Pulling key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
key = self.bucket.get_key(rel_path)
- # Test is cache is large enough to hold the new file
- if key.size > self.cache_size:
+ # Test if cache is large enough to hold the new file
+ if self.cache_size > 0 and key.size > self.cache_size:
log.critical("File %s is larger (%s) than the cache size (%s). Cannot download." \
% (rel_path, key.size, self.cache_size))
return False
- # Test if 'axel' is available for parallel download and pull the key into cache
- try:
- ret_code = subprocess.call('axel')
- except OSError:
- ret_code = 127
- if ret_code == 127:
- self.transfer_progress = 0 # Reset transfer progress counter
- key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
- #print "(ssss1) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
- return True
- else:
+ if self.use_axel:
+ log.debug("Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
ncores = multiprocessing.cpu_count()
url = key.generate_url(7200)
ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url))
if ret_code == 0:
- #print "(ssss2) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
return True
+ else:
+ log.debug("Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
+ return True
except S3ResponseError, ex:
log.error("Problem downloading key '%s' from S3 bucket '%s': %s" % (rel_path, self.bucket.name, ex))
return False
-
+
def _push_to_s3(self, rel_path, source_file=None, from_string=None):
- """
- Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
- If `source_file` is provided, push that file instead while still using
+ """
+ Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
+ If `source_file` is provided, push that file instead while still using
`rel_path` as the key name.
If `from_string` is provided, set contents of the file to the value of
the string
@@ -651,16 +646,16 @@
except S3ResponseError, ex:
log.error("Trouble pushing S3 key '%s' from file '%s': %s" % (rel_path, source_file, ex))
return False
-
+
def file_ready(self, obj, **kwargs):
- """ A helper method that checks if a file corresponding to a dataset
+ """ A helper method that checks if a file corresponding to a dataset
is ready and available to be used. Return True if so, False otherwise."""
rel_path = self._construct_path(obj, **kwargs)
# Make sure the size in cache is available in its entirety
if self._in_cache(rel_path) and os.path.getsize(self._get_cache_path(rel_path)) == self._get_size_in_s3(rel_path):
return True
return False
-
+
def exists(self, obj, **kwargs):
in_cache = in_s3 = False
rel_path = self._construct_path(obj, **kwargs)
@@ -685,10 +680,10 @@
return True
else:
return False
-
+
def create(self, obj, **kwargs):
if not self.exists(obj, **kwargs):
- #print "S3 OS creating a dataset with ID %s" % dataset_id
+ #print "S3 OS creating a dataset with ID %s" % kwargs
# Pull out locally used fields
extra_dir = kwargs.get('extra_dir', None)
extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
@@ -696,7 +691,8 @@
alt_name = kwargs.get('alt_name', None)
# print "---- Processing: %s; %s" % (alt_name, locals())
# Construct hashed path
- rel_path = os.path.join(*directory_hash_id(obj))
+ rel_path = os.path.join(*directory_hash_id(obj.id))
+
# Optionally append extra_dir
if extra_dir is not None:
if extra_dir_at_root:
@@ -717,13 +713,13 @@
rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
open(os.path.join(self.staging_path, rel_path), 'w').close()
self._push_to_s3(rel_path, from_string='')
-
+
def empty(self, obj, **kwargs):
if self.exists(obj, **kwargs):
return bool(self.size(obj, **kwargs) > 0)
else:
raise ObjectNotFound()
-
+
def size(self, obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
if self._in_cache(rel_path):
@@ -735,7 +731,7 @@
return self._get_size_in_s3(rel_path)
log.warning("Did not find dataset '%s', returning 0 for size" % rel_path)
return 0
-
+
def delete(self, obj, entire_dir=False, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
extra_dir = kwargs.get('extra_dir', None)
@@ -765,7 +761,7 @@
except OSError, ex:
log.error('%s delete error %s' % (self._get_filename(obj, **kwargs), ex))
return False
-
+
def get_data(self, obj, start=0, count=-1, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
# Check cache first and get file if not there
@@ -779,7 +775,7 @@
content = data_file.read(count)
data_file.close()
return content
-
+
def get_filename(self, obj, **kwargs):
#print "S3 get_filename for dataset: %s" % dataset_id
dir_only = kwargs.get('dir_only', False)
@@ -809,8 +805,8 @@
# return cache_path
raise ObjectNotFound()
# return cache_path # Until the upload tool does not explicitly create the dataset, return expected path
-
- def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+
+ def update_from_file(self, obj, file_name=None, create=False, **kwargs):
if create:
self.create(obj, **kwargs)
if self.exists(obj, **kwargs):
@@ -833,7 +829,7 @@
self._push_to_s3(rel_path, source_file)
else:
raise ObjectNotFound()
-
+
def get_object_url(self, obj, **kwargs):
if self.exists(obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
@@ -854,7 +850,7 @@
first store where the object exists is used, objects are created in a
store selected randomly, but with weighting.
"""
-
+
def __init__(self, config):
super(DistributedObjectStore, self).__init__()
self.distributed_config = config.distributed_object_store_config_file
@@ -1003,7 +999,7 @@
first store where the object exists is used, objects are always created
in the first store.
"""
-
+
def __init__(self, backends=[]):
super(HierarchicalObjectStore, self).__init__()
@@ -1027,7 +1023,7 @@
if bytes is None:
bytes = 0
bytes = float(bytes)
-
+
if bytes >= 1099511627776:
terabytes = bytes / 1099511627776
size = '%.2fTB' % terabytes
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
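
A note on the download changes in this changeset: probing for 'axel' was moved out
of _download() and into __init__(), so the external-binary check runs once per
object store rather than once per download. The probe relies on subprocess.call()
raising OSError when the executable is missing. A minimal, standalone sketch of
that probe-and-fallback pattern (the download() helper, the axel flags, and the
urllib2 fallback are illustrative stand-ins for the boto-based code in the diff,
not Galaxy's actual API):

    import multiprocessing
    import subprocess
    import urllib2

    def probe_axel():
        # subprocess.call() raises OSError if 'axel' is not on PATH;
        # the changeset uses the same signal to set self.use_axel.
        try:
            subprocess.call(['axel', '-V'])
            return True
        except OSError:
            return False

    def download(url, target, use_axel):
        if use_axel:
            # One connection per core, as in the changeset's axel invocation.
            ncores = multiprocessing.cpu_count()
            return subprocess.call(['axel', '-a', '-n', str(ncores),
                                    '-o', target, url]) == 0
        # Serial fallback; stands in for boto's key.get_contents_to_filename().
        open(target, 'wb').write(urllib2.urlopen(url).read())
        return True

Probing once at startup also avoids the per-download subprocess spawn the old
code paid just to discover that axel was absent.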
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/958d1f8b2caf/
changeset: 958d1f8b2caf
user: Scott McManus
date: 2012-06-19 20:10:08
summary: Merge issues.
affected #: 2 files
diff -r 22eea899376f142134c30abafb6e156a514574dd -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -310,7 +310,7 @@
# Check exit codes and match regular expressions against stdout and
# stderr if this tool was configured to do so.
if ( len( self.tool.stdio_regexes ) > 0 or
- len( self.tool.exit_codes ) > 0 ):
+ len( self.tool.stdio_exit_codes ) > 0 ):
# We will check the exit code ranges in the order in which
# they were specified. Each exit_code is a ToolStdioExitCode
# that includes an applicable range. If the exit code was in
diff -r 22eea899376f142134c30abafb6e156a514574dd -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -77,6 +77,7 @@
self.job_file = None
self.ofile = None
self.efile = None
+ self.rcfile = None
self.runner_url = None
class DRMAAJobRunner( BaseJobRunner ):
@@ -168,6 +169,7 @@
# define job attributes
ofile = "%s.drmout" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
efile = "%s.drmerr" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
+ rcfile = "%s.drmrc" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
job_name = "g%s_%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id, job_wrapper.user )
job_name = ''.join( map( lambda x: x if x in ( string.letters + string.digits + '_' ) else '_', job_name ) )
@@ -176,6 +178,7 @@
jt.jobName = job_name
jt.outputPath = ":%s" % ofile
jt.errorPath = ":%s" % efile
+ jt.returnCodePath = ":%s" % rcfile
native_spec = self.get_native_spec( runner_url )
if native_spec is not None:
jt.nativeSpecification = native_spec
@@ -228,6 +231,7 @@
drm_job_state.job_id = job_id
drm_job_state.ofile = ofile
drm_job_state.efile = efile
+ drm_job_state.rcfile = rcfile
drm_job_state.job_file = jt.remoteCommand
drm_job_state.old_state = 'new'
drm_job_state.running = False
@@ -312,6 +316,7 @@
"""
ofile = drm_job_state.ofile
efile = drm_job_state.efile
+ rcfile = drm_job_state.rcfile
job_file = drm_job_state.job_file
# collect the output
# wait for the files to appear
@@ -377,6 +382,7 @@
drm_job_state = DRMAAJobState()
drm_job_state.ofile = "%s.drmout" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.efile = "%s.drmerr" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
+ drm_job_state.rcfile = "%s.drmrc" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.job_file = "%s/galaxy_%s.sh" % (self.app.config.cluster_files_directory, job.id)
drm_job_state.job_id = str( job.job_runner_external_id )
drm_job_state.runner_url = job_wrapper.get_job_runner()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
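
The drmaa.py hunks above thread a third per-job file, the .drmrc return-code
file, through the runner alongside the existing .drmout/.drmerr pair, composed
from the same working-directory-plus-id-tag base. A small sketch of that naming
scheme (the inputs here are hypothetical values, not Galaxy objects):

    import os

    def job_state_files(working_directory, id_tag):
        # Mirrors the naming in the diff: <working_directory>/<id_tag>.drmout, etc.
        base = os.path.join(working_directory, id_tag)
        return {'ofile': '%s.drmout' % base,   # captured stdout
                'efile': '%s.drmerr' % base,   # captured stderr
                'rcfile': '%s.drmrc' % base}   # exit code, written by the job wrapper

    print(job_state_files('/galaxy/database/job_working_directory/000/1', '1'))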
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/22eea899376f/
changeset: 22eea899376f
user: Scott McManus
date: 2012-06-19 18:18:11
summary: Fixing vimdiff errors.
affected #: 1 file
diff -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 -r 22eea899376f142134c30abafb6e156a514574dd lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2361,12 +2361,12 @@
installed_tool_dependencies = self.tool_shed_repository.tool_dependencies
else:
installed_tool_dependencies = None
- for requirement in self.requirements:
- # TODO: currently only supporting requirements of type package,
- # need to implement some mechanism for mapping other types
- # back to packages
+ for requirement in self.requirements:
+ # TODO: currently only supporting requirements of type package,
+ # need to implement some mechanism for mapping other types
+ # back to packages
log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- if requirement.type == 'package':
+ if requirement.type == 'package':
script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
version=requirement.version,
type=requirement.type,
@@ -2656,7 +2656,7 @@
param_dict.update( { 'type' : 'number', 'init_value' : input.value,
'html' : urllib.quote( input.get_html( trans ) ),
'min': input.min,
- 'max': input.max
+ 'max': input.max,
'value': input.value
} )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
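
The second hunk above is a one-character syntax fix: the dict literal was
missing the comma between the 'max' and 'value' entries, which is a SyntaxError
in Python, so the module would not even import. A minimal reproduction:

    # Broken: the missing comma after the 'max' entry raises SyntaxError
    # at compile time:
    #   d = {'min': 0, 'max': 10 'value': 5}
    # Fixed, matching the '+' line in the diff:
    d = {'min': 0, 'max': 10, 'value': 5}
    print(d)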

commit/galaxy-central: Scott McManus: Added application of regular expressions and exit code. Pulling exit
by Bitbucket 19 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3b2dc0a51d14/
changeset: 3b2dc0a51d14
user: Scott McManus
date: 2012-06-19 17:35:36
summary: Added application of regular expressions and exit code. Pulling exit
codes from the runners will be part of a separate submission.
affected #: 3 files
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -305,10 +305,81 @@
if job.state == job.states.DELETED or job.state == job.states.ERROR:
#ERROR at this point means the job was deleted by an administrator.
return self.fail( job.info )
- if stderr:
- job.state = job.states.ERROR
+
+ err_msg = ""
+ # Check exit codes and match regular expressions against stdout and
+ # stderr if this tool was configured to do so.
+ if ( len( self.tool.stdio_regexes ) > 0 or
+ len( self.tool.exit_codes ) > 0 ):
+ # We will check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a ToolStdioExitCode
+ # that includes an applicable range. If the exit code was in
+ # that range, then apply the error level and add in a message.
+ # If we've reached a fatal error rule, then stop.
+ max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
+ for exit_code in self.tool.stdio_exit_codes:
+ # TODO: Fetch the exit code from the .rc file:
+ tool_exit_code = 0
+ if ( tool_exit_code >= exit_code.range_start and
+ tool_exit_code <= exit_code.range_end ):
+ if None != exit_code.desc:
+ err_msg += exit_code.desc
+ # TODO: Find somewhere to stick the err_msg - possibly to
+ # the source (stderr/stdout), possibly in a new db column.
+ max_error_level = max( max_error_level,
+ exit_code.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
+ if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
+ # We'll examine every regex. Each regex specifies whether
+ # it is to be run on stdout, stderr, or both. (It is
+ # possible for neither stdout nor stderr to be scanned,
+ # but those won't be scanned.) We record the highest
+ # error level, which are currently "warning" and "fatal".
+ # If fatal, then we set the job's state to ERROR.
+ # If warning, then we still set the job's state to OK
+ # but include a message. We'll do this if we haven't seen
+ # a fatal error yet
+ for regex in self.tool.stdio_regexes:
+ # If ( this regex should be matched against stdout )
+ # - Run the regex's match pattern against stdout
+ # - If it matched, then determine the error level.
+ # o If it was fatal, then we're done - break.
+ # Repeat the stdout stuff for stderr.
+ # TODO: Collapse this into a single function.
+ if ( regex.stdout_match ):
+ regex_match = re.search( regex.match, stdout )
+ if ( regex_match ):
+ err_msg += self.regex_err_msg( regex_match, regex )
+ max_error_level = max( max_error_level, regex.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr )
+ if ( regex_match ):
+ err_msg += self.regex_err_msg( regex_match, regex )
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ # If we encountered a fatal error, then we'll need to set the
+ # job state accordingly. Otherwise the job is ok:
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ job.state = job.states.ERROR
+ else:
+ job.state = job.states.OK
+ # When there are no regular expressions and no exit codes to check,
+ # default to the previous behavior: when there's anything on stderr
+ # the job has an error, and the job is ok otherwise.
else:
- job.state = job.states.OK
+ if stderr:
+ job.state = job.states.ERROR
+ else:
+ job.state = job.states.OK
if self.version_string_cmd:
version_filename = self.get_version_string_path()
if os.path.exists(version_filename):
@@ -330,6 +401,7 @@
return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
+
def in_directory( file, directory ):
# Make both absolute.
directory = os.path.abspath( directory )
@@ -370,7 +442,11 @@
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
- if context['stderr']:
+ # TODO: The context['stderr'] holds stderr's contents. An error
+ # only really occurs if the job also has an error. So check the
+ # job's state:
+ #if context['stderr']:
+ if job.states.ERROR == job.state:
dataset.blurb = "error"
elif dataset.has_data():
# If the tool was expected to set the extension, attempt to retrieve it
@@ -385,7 +461,14 @@
( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
and self.app.config.retry_metadata_internally ):
dataset.set_meta( overwrite = False )
- elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and not context['stderr']:
+ # TODO: The context['stderr'] used to indicate that there
+ # was an error. Now we must rely on the job's state instead;
+ # that indicates whether the tool relied on stderr to indicate
+ # the state or whether the tool used exit codes and regular
+ # expressions to do so. So we use
+ # job.state == job.states.ERROR to replace this same test.
+ #elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and not context['stderr']:
+ elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != job.state:
dataset._state = model.Dataset.states.FAILED_METADATA
else:
#load metadata from file
@@ -415,7 +498,12 @@
if dataset.ext == 'auto':
dataset.extension = 'txt'
self.sa_session.add( dataset )
- if context['stderr']:
+ # TODO: job.states.ERROR == job.state now replaces checking
+ # stderr for a problem:
+ #if context['stderr']:
+ if job.states.ERROR == job.state:
+ log.debug( "setting dataset state to ERROR" )
+ # TODO: This is where the state is being set to error. Change it!
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
@@ -480,6 +568,29 @@
if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ):
self.cleanup()
+ def regex_err_msg( self, match, regex ):
+ """
+ Return a message about the match on tool output using the given
+ ToolStdioRegex regex object. The regex_match is a MatchObject
+ that will contain the string matched on.
+ """
+ # Get the description for the error level:
+ err_msg = galaxy.tools.StdioErrorLevel.desc( regex.error_level ) + ": "
+ # If there's a description for the regular expression, then use it.
+ # Otherwise, we'll take the first 256 characters of the match.
+ if None != regex.desc:
+ err_msg += regex.desc
+ else:
+ mstart = match.start()
+ mend = match.end()
+ err_msg += "Matched on "
+ # TODO: Move the constant 256 somewhere else besides here.
+ if mend - mstart > 256:
+ err_msg += match.string[ mstart : mstart+256 ] + "..."
+ else:
+ err_msg += match.string[ mstart: mend ]
+ return err_msg
+
def cleanup( self ):
# remove temporary files
try:
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -37,6 +37,23 @@
log = logging.getLogger( __name__ )
+# These determine stdio-based error levels from matching on regular expressions
+# and exit codes. They are meant to be used comparatively, such as showing
+# that warning < fatal. This is really meant to just be an enum.
+class StdioErrorLevel( object ):
+ NO_ERROR = 0
+ WARNING = 1
+ FATAL = 2
+ MAX = 2
+ descs = {NO_ERROR : 'No error', WARNING : 'Warning', FATAL : 'Fatal error'}
+ @staticmethod
+ def desc( error_level ):
+ err_msg = "Unknown error"
+ if ( error_level > 0 and
+ error_level <= StdioErrorLevel.MAX ):
+ err_msg = StdioErrorLevel.descs[ error_level ]
+ return err_msg
+
class ToolNotFoundException( Exception ):
pass
@@ -1140,6 +1157,12 @@
# a warning and skip to the next.
for exit_code_elem in ( stdio_elem.findall( "exit_code" ) ):
exit_code = ToolStdioExitCode()
+ # Each exit code has an optional description that can be
+ # part of the "desc" or "description" attributes:
+ exit_code.desc = exit_code_elem.get( "desc" )
+ if None == exit_code.desc:
+ exit_code.desc = exit_code_elem.get( "description" )
+ # Parse the error level:
exit_code.error_level = (
self.parse_error_level( exit_code_elem.get( "level" )))
code_range = exit_code_elem.get( "range", "" )
@@ -1155,11 +1178,9 @@
# X:Y - Split on the colon. We do not allow a colon
# without a beginning or end, though we could.
# Also note that whitespace is eliminated.
- # TODO: Turn this into a single match - it will be
- # more efficient
- string.strip( code_range )
+ # TODO: Turn this into a single match - it should be
+ # more efficient.
code_range = re.sub( "\s", "", code_range )
- log.debug( "Code range after sub: %s" % code_range )
code_ranges = re.split( ":", code_range )
if ( len( code_ranges ) == 2 ):
if ( None == code_ranges[0] or '' == code_ranges[0] ):
@@ -1216,6 +1237,12 @@
for regex_elem in ( stdio_elem.findall( "regex" ) ):
# TODO: Fill in ToolStdioRegex
regex = ToolStdioRegex()
+ # Each regex has an optional description that can be
+ # part of the "desc" or "description" attributes:
+ regex.desc = regex_elem.get( "desc" )
+ if None == regex.desc:
+ regex.desc = regex_elem.get( "description" )
+ # Parse the error level
regex.error_level = (
self.parse_error_level( regex_elem.get( "level" ) ) )
regex.match = regex_elem.get( "match", "" )
@@ -1243,9 +1270,9 @@
# and anything to do with "err". If neither stdout nor
# stderr were specified, then raise a warning and scan both.
for src in src_list:
- if re.match( "out", src, re.IGNORECASE ):
+ if re.search( "out", src, re.IGNORECASE ):
regex.stdout_match = True
- if re.match( "err", src, re.IGNORECASE ):
+ if re.search( "err", src, re.IGNORECASE ):
regex.stderr_match = True
if (not regex.stdout_match and not regex.stderr_match):
log.warning( "Unable to determine if tool stream "
@@ -1262,24 +1289,25 @@
trace_msg = repr( traceback.format_tb( trace ) )
log.error( "Traceback: %s" % trace_msg )
+ # TODO: This method doesn't have to be part of the Tool class.
def parse_error_level( self, err_level ):
"""
Return fatal or warning depending on what's in the error level.
This will assume that the error level fatal is returned if it's
- unparsable. (This doesn't have to be part of the Tool class.)
+ unparsable.
"""
# What should the default be? I'm claiming it should be fatal:
# if you went to the trouble to write the rule, then it's
# probably a problem. I think there are easily three substantial
# camps: make it fatal, make it a warning, or, if it's missing,
- # just throw an exception and ignore it.
- return_level = "fatal"
+ # just throw an exception and ignore the exit_code element.
+ return_level = StdioErrorLevel.FATAL
try:
if ( None != err_level ):
if ( re.search( "warning", err_level, re.IGNORECASE ) ):
- return_level = "warning"
+ return_level = StdioErrorLevel.WARNING
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
- return_level = "fatal"
+ return_level = StdioErrorLevel.FATAL
except Exception, e:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
@@ -2333,16 +2361,18 @@
installed_tool_dependencies = self.tool_shed_repository.tool_dependencies
else:
installed_tool_dependencies = None
- for requirement in self.requirements:
- # TODO: currently only supporting requirements of type package,
- # need to implement some mechanism for mapping other types
- # back to packages
+ for requirement in self.requirements:
+ # TODO: currently only supporting requirements of type package,
+ # need to implement some mechanism for mapping other types
+ # back to packages
log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- if requirement.type == 'package':
+ if requirement.type == 'package':
script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
version=requirement.version,
type=requirement.type,
installed_tool_dependencies=installed_tool_dependencies )
+ if requirement.type == 'package':
+ script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( requirement.name, requirement.version )
if script_file is None and base_path is None:
log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
elif script_file is None:
@@ -2617,7 +2647,7 @@
elif isinstance( input, SelectToolParameter ):
param_dict.update( { 'type' : 'select',
'html' : urllib.quote( input.get_html( trans ) ),
- 'options': input.static_options
+ 'options': input.static_options
} )
elif isinstance( input, Conditional ):
# TODO.
@@ -2626,7 +2656,7 @@
param_dict.update( { 'type' : 'number', 'init_value' : input.value,
'html' : urllib.quote( input.get_html( trans ) ),
'min': input.min,
- 'max': input.max,
+ 'max': input.max
'value': input.value
} )
else:
@@ -2798,6 +2828,7 @@
self.stderr_match = False
# TODO: Define a common class or constant for error level:
self.error_level = "fatal"
+ self.desc = ""
class ToolStdioExitCode( object ):
"""
@@ -2809,6 +2840,7 @@
self.range_end = float( "inf" )
# TODO: Define a common class or constant for error level:
self.error_level = "fatal"
+ self.desc = ""
class ToolParameterValueWrapper( object ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
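
Two details in the parsing code above are worth spelling out. Exit-code ranges
may be written as "n", "n:", ":m", or "n:m", with a missing side defaulting to
infinity (ToolStdioExitCode initializes range_end to float("inf"); range_start
presumably mirrors that with negative infinity). And error levels are plain
comparable integers so that max() can accumulate the worst level seen across
rules. A standalone sketch of the range handling under those assumptions
(constants re-declared for illustration; these are not the Galaxy classes):

    import re

    NO_ERROR, WARNING, FATAL = 0, 1, 2  # comparable, as in StdioErrorLevel

    def parse_code_range(code_range):
        # Strip whitespace, then split on ':', as the changeset does.
        code_range = re.sub(r'\s', '', code_range)
        if ':' in code_range:
            start, end = code_range.split(':', 1)
            return (int(start) if start else float('-inf'),
                    int(end) if end else float('inf'))
        value = int(code_range)
        return (value, value)

    assert parse_code_range('1:') == (1, float('inf'))
    assert parse_code_range(':2') == (float('-inf'), 2)
    assert parse_code_range(' 3 : 7 ') == (3, 7)
    assert parse_code_range('42') == (42, 42)

An exit code c then matches a rule when range_start <= c <= range_end, and the
job is marked failed only once the accumulated level reaches FATAL.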

commit/galaxy-central: greg: Add the ability to browse, uninstall and reinstall tool dependencies installed with tool shed repositories.
by Bitbucket 18 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7c495f835a1d/
changeset: 7c495f835a1d
user: greg
date: 2012-06-18 18:15:04
summary: Add the ability to browse, uninstall and reinstall tool dependencies installed with tool shed repositories.
affected #: 19 files
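Among the model changes in the diff below, missing_tool_dependencies is
simplified from a dict built by a nested helper to a flat list of
(name, version, type) tuples, and a new uninstalled_tool_dependencies property
filters on the uninstalled flag. A toy illustration of that filtering
(ToolDependency here is a bare stand-in, not the Galaxy model class):

    class ToolDependency(object):
        def __init__(self, name, version, type, uninstalled=False):
            self.name, self.version, self.type = name, version, type
            self.uninstalled = uninstalled

    deps = [ToolDependency('samtools', '0.1.18', 'package'),
            ToolDependency('bwa', '0.5.9', 'package', uninstalled=True)]

    # Equivalent of the new uninstalled_tool_dependencies property:
    uninstalled = [td for td in deps if td.uninstalled]
    print([(td.name, td.version, td.type) for td in uninstalled])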
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2701,13 +2701,8 @@
return installed_dependencies
@property
def missing_tool_dependencies( self ):
- """Return the repository's tool dependencies that are not currently installed."""
- def add_missing_dependency( missing_dependencies_dict, name, version, type, installed_changeset_revision=None ):
- missing_dependencies_dict[ name ] = dict( version=version,
- type=type,
- installed_changeset_revision=installed_changeset_revision )
- return missing_dependencies_dict
- missing_dependencies = {}
+ """Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
+ missing_dependencies = []
# Get the dependency information from the metadata for comparison against the installed tool dependencies.
tool_dependencies = self.metadata.get( 'tool_dependencies', None )
if tool_dependencies:
@@ -2717,25 +2712,27 @@
type = requirements_dict[ 'type' ]
if self.tool_dependencies:
found = False
- for installed_dependency in self.tool_dependencies:
- if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.name==name and tool_dependency.version==version and tool_dependency.type==type:
found = True
- if installed_dependency.uninstalled:
- missing_dependencies = add_missing_dependency( missing_dependencies,
- installed_dependency.name,
- installed_dependency.version,
- installed_dependency.type,
- installed_dependency.installed_changeset_revision )
+ if tool_dependency.uninstalled:
+ missing_dependencies.append( ( tool_dependency.name, tool_dependency.version, tool_dependency.type ) )
break
if not found:
- missing_dependencies = add_missing_dependency( missing_dependencies, name, version, type )
- return missing_dependencies
- return None
+ missing_dependencies.append( ( name, version, type ) )
+ return missing_dependencies
+ @property
+ def uninstalled_tool_dependencies( self ):
+ """Return the repository's tool dependencies that have been uninstalled."""
+ uninstalled_tool_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.uninstalled:
+ uninstalled_tool_dependencies.append( tool_dependency )
+ return uninstalled_tool_dependencies
class ToolDependency( object ):
- def __init__( self, tool_shed_repository_id=None, installed_changeset_revision=None, name=None, version=None, type=None, uninstalled=False ):
+ def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, uninstalled=False ):
self.tool_shed_repository_id = tool_shed_repository_id
- self.installed_changeset_revision = installed_changeset_revision
self.name = name
self.version = version
self.type = type
@@ -2746,7 +2743,8 @@
self.version,
self.tool_shed_repository.owner,
self.tool_shed_repository.name,
- self.installed_changeset_revision )
+ self.tool_shed_repository.installed_changeset_revision )
+
class ToolVersion( object ):
def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
self.id = id
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -395,7 +395,6 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
- Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "name", TrimmedString( 255 ) ),
Column( "version", Text ),
Column( "type", TrimmedString( 40 ) ),
@@ -1681,6 +1680,7 @@
backref='tool_shed_repository' ),
tool_dependencies=relation( ToolDependency,
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
+ order_by=ToolDependency.table.c.name,
backref='tool_shed_repository' ) ) )
assign_mapper( context, ToolDependency, ToolDependency.table )
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
@@ -0,0 +1,39 @@
+"""
+Migration script to drop the installed_changeset_revision column from the tool_dependency table.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+import sys, logging
+from galaxy.model.custom_types import *
+from sqlalchemy.exc import *
+import datetime
+now = datetime.datetime.utcnow
+
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ except NoSuchTableError:
+ ToolDependency_table = None
+ log.debug( "Failed loading table tool_dependency" )
+ if ToolDependency_table:
+ try:
+ col = ToolDependency_table.c.installed_changeset_revision
+ col.drop()
+ except Exception, e:
+ log.debug( "Dropping column 'installed_changeset_revision' from tool_dependency table failed: %s" % ( str( e ) ) )
+def downgrade():
+ pass
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -120,8 +120,8 @@
if not is_displayed:
is_displayed = True
return is_displayed, tool_sections
- def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, installed_changeset_revision,
- ctx_rev, install_dependencies ):
+ def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description,
+ installed_changeset_revision, ctx_rev, install_dependencies ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -176,7 +176,6 @@
# Install tool dependencies.
status, message = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=installed_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from the InstallManager while installing tool dependencies:'
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -48,7 +48,6 @@
tree = util.parse_xml( tool_panel_config )
root = tree.getroot()
for elem in root:
- missing_tool_dependencies = []
if elem.tag == 'tool':
missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
elif elem.tag == 'section':
@@ -116,6 +115,8 @@
self.datatypes_registry = galaxy.datatypes.registry.Registry()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+ # Tool data tables
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,9 +1,9 @@
import os, shutil, tarfile, urllib2
from galaxy.datatypes.checkers import *
-DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ]
-MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ]
-ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES
+MISCELLANEOUS_ACTIONS = [ 'change_directory' ]
+MOVE_ACTIONS = [ 'move_directory_files', 'move_file' ]
+ALL_ACTIONS = MISCELLANEOUS_ACTIONS + MOVE_ACTIONS
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -32,32 +32,20 @@
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
-def handle_post_build_processing( tool_dependency_dir, install_dir, package_name=None ):
- cmd = "echo 'PATH=%s/bin:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir )
+def handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=None ):
+ cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
message = ''
output = local( cmd, capture=True )
log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) )
if output.return_code:
message = '%s %s' % ( message, str( output.stderr ) )
- """
- Since automatic dependency installation requires a version attribute in the tool's <requirement> tag, we don't have to
- create a default symlink, but we'll keep this code around for a bit just in case we need it later.
- if package_name:
- package_dir = os.path.join( tool_dependency_dir, package_name )
- package_default = os.path.join( package_dir, 'default' )
- if not os.path.islink( package_default ):
- cmd = 'ln -s %s %s' % ( install_dir, package_default )
- output = local( cmd, capture=True )
- if output.return_code:
- message = '%s\n%s' % ( message, str( output.stderr ) )
- """
return message
def install_and_build_package( params_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
install_dir = params_dict[ 'install_dir' ]
download_url = params_dict.get( 'download_url', None )
clone_cmd = params_dict.get( 'clone_cmd', None )
- build_commands = params_dict.get( 'build_commands', None )
+ actions = params_dict.get( 'actions', None )
package_name = params_dict.get( 'package_name', None )
with make_tmp_dir() as work_dir:
with lcd( work_dir ):
@@ -75,36 +63,36 @@
if output.return_code:
return '%s. ' % str( output.stderr )
dir = package_name
- if build_commands:
+ if actions:
with lcd( dir ):
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- for build_command_tup in build_commands:
- build_command_key, build_command_dict = build_command_tup
- if build_command_key.find( 'v^v^v' ) >= 0:
- build_command_items = build_command_key.split( 'v^v^v' )
- build_command_name = build_command_items[ 0 ]
- build_command = build_command_items[ 1 ]
- elif build_command_key in common_util.ALL_BUILD_COMMAND_NAMES:
- build_command_name = build_command_key
+ for action_tup in actions:
+ action_key, action_dict = action_tup
+ if action_key.find( 'v^v^v' ) >= 0:
+ action_items = action_key.split( 'v^v^v' )
+ action_name = action_items[ 0 ]
+ action = action_items[ 1 ]
+ elif action_key in common_util.ALL_ACTIONS:
+ action_name = action_key
else:
- build_command_name = None
- if build_command_name:
- if build_command_name == 'change_directory':
- current_dir = os.path.join( current_dir, build_command )
+ action_name = None
+ if action_name:
+ if action_name == 'change_directory':
+ current_dir = os.path.join( current_dir, action )
lcd( current_dir )
- elif build_command_name == 'move_directory_files':
+ elif action_name == 'move_directory_files':
common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( build_command_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( build_command_dict[ 'destination_directory' ] ) )
- elif build_command_name == 'move_file':
+ source_dir=os.path.join( action_dict[ 'source_directory' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
+ elif action_name == 'move_file':
common_util.move_file( current_dir=current_dir,
- source=os.path.join( build_command_dict[ 'source' ] ),
- destination_dir=os.path.join( build_command_dict[ 'destination' ] ) )
+ source=os.path.join( action_dict[ 'source' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination' ] ) )
else:
- build_command = build_command_key
+ action = action_key
with settings( warn_only=True ):
- output = local( build_command, capture=True )
- log_results( build_command, output, os.path.join( install_dir, 'build_commands.log' ) )
+ output = local( action, capture=True )
+ log_results( action, output, os.path.join( install_dir, 'actions.log' ) )
if output.return_code:
return '%s. ' % str( output.stderr )
return ''
@@ -119,10 +107,10 @@
logfile = open( file_path, 'wb' )
logfile.write( "\n#############################################" )
logfile.write( '\n%s\nSTDOUT\n' % command )
+ logfile.write( str( fabric_AttributeString.stdout ) )
logfile.write( "#############################################\n" )
- logfile.write( str( fabric_AttributeString.stdout ) )
logfile.write( "\n#############################################" )
logfile.write( '\n%s\nSTDERR\n' % command )
+ logfile.write( str( fabric_AttributeString.stderr ) )
logfile.write( "#############################################\n" )
- logfile.write( str( fabric_AttributeString.stderr ) )
logfile.close()
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -11,131 +11,132 @@
from elementtree import ElementTree, ElementInclude
from elementtree.ElementTree import Element, SubElement
-def create_or_update_tool_dependency( app, tool_shed_repository, changeset_revision, name, version, type ):
- """
- This method is called from Galaxy (never the tool shed) when a new tool_shed_repository is being installed or when an ininstalled repository is
- being reinstalled.
- """
- # First see if a tool_dependency record exists for the received changeset_revision.
+def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type ):
+ # Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
+ # First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
sa_session = app.model.context.current
- tool_dependency = get_tool_dependency_by_shed_changeset_revision( app, tool_shed_repository, name, version, type, changeset_revision )
+ tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
if tool_dependency:
tool_dependency.uninstalled = False
else:
- # Check the tool_shed_repository's set of tool_depnedency records for any that are marked uninstalled. If one is found, set uninstalled to
- # False and update the value of installed_changeset_revision.
- found = False
- for tool_dependency in tool_shed_repository.tool_dependencies:
- if tool_dependency.name == name and tool_dependency.version == version and tool_dependency.type == type and tool_dependency.uninstalled:
- found = True
- tool_dependency.uninstalled = False
- tool_dependency.installed_changeset_revision = changeset_revision
- break
- if not found:
- # Create a new tool_dependency record for the tool_shed_repository.
- tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
- installed_changeset_revision=changeset_revision,
- name=name,
- version=version,
- type=type )
+ # Create a new tool_dependency record for the tool_shed_repository.
+ tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
+ name=name,
+ version=version,
+ type=type )
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
-def get_tool_dependency_install_dir( app, repository, installed_changeset_revision, package_name, package_version ):
+def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.ToolDependency ) \
+ .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+ app.model.ToolDependency.table.c.name == name,
+ app.model.ToolDependency.table.c.version == version,
+ app.model.ToolDependency.table.c.type == type ) ) \
+ .first()
+def get_tool_dependency_install_dir( app, repository, package_name, package_version ):
return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
package_name,
package_version,
repository.owner,
repository.name,
- installed_changeset_revision ) )
-def get_tool_dependency_by_shed_changeset_revision( app, repository, dependency_name, dependency_version, dependency_type, changeset_revision ):
- sa_session = app.model.context.current
- return sa_session.query( app.model.ToolDependency ) \
- .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
- app.model.ToolDependency.table.c.name == dependency_name,
- app.model.ToolDependency.table.c.version == dependency_version,
- app.model.ToolDependency.table.c.type == dependency_type,
- app.model.ToolDependency.table.c.installed_changeset_revision == changeset_revision ) ) \
- .first()
-def install_package( app, elem, tool_shed_repository, installed_changeset_revision ):
+ repository.installed_changeset_revision ) )
+def install_package( app, elem, tool_shed_repository, name=None, version=None ):
+ # If name and version are not None, then a specific tool dependency is being installed.
+ message = ''
# The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
- message = ''
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- install_dir = get_install_dir( app, tool_shed_repository, installed_changeset_revision, package_name, package_version )
- if not_installed( install_dir ):
- for package_elem in elem:
- if package_elem.tag == 'proprietary_fabfile':
- # TODO: This is not yet working...
- # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
- if not fabric_version_checked:
- check_fabric_version()
- fabric_version_checked = True
- fabfile_name = package_elem.get( 'name', None )
- fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- print 'Installing tool dependencies via fabric script ', fabfile_path
- elif package_elem.tag == 'fabfile':
- # Handle tool dependency installation using a fabric script provided by Galaxy. Example tag set definition:
- fabfile_path = None
- for method_elem in package_elem.findall( 'method' ):
- error_message = run_fabric_method( app,
- method_elem,
- fabfile_path,
- app.config.tool_dependency_dir,
- install_dir,
- package_name=package_name )
- if error_message:
- message += '%s' % error_message
- else:
- tool_dependency = create_or_update_tool_dependency( app,
- tool_shed_repository,
- installed_changeset_revision,
- name=package_name,
- version=package_version,
- type='package' )
- print package_name, 'version', package_version, 'installed in', install_dir
- else:
- print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
+ if ( not name and not version ) or ( name and version and name==package_name and version==package_version ):
+ install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
+ if not os.path.exists( install_dir ):
+ for package_elem in elem:
+ if package_elem.tag == 'proprietary_fabfile':
+ # TODO: This is not yet working...
+ # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
+ if not fabric_version_checked:
+ check_fabric_version()
+ fabric_version_checked = True
+ fabfile_name = package_elem.get( 'name', None )
+ fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
+ print 'Installing tool dependencies via fabric script ', fabfile_path
+ elif package_elem.tag == 'fabfile':
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+ fabfile_path = None
+ for method_elem in package_elem:
+ error_message = run_fabric_method( app,
+ method_elem,
+ fabfile_path,
+ app.config.tool_dependency_dir,
+ install_dir,
+ package_name=package_name )
+ if error_message:
+ message += '%s' % error_message
+ else:
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package' )
+ print package_name, 'version', package_version, 'installed in', install_dir
+ else:
+ print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
return message
-def not_installed( install_dir ):
- # TODO: try executing a binary or something in addition to just seeing if the install_dir exists.
- return not os.path.exists( install_dir )
def run_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
"""Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
+ # Default value for env_dependency_path.
+ install_path, install_directory = os.path.split( install_dir )
+ if install_directory != 'bin':
+ env_dependency_path = os.path.join( install_dir, 'bin' )
+ else:
+ env_dependency_path = install_dir
method_name = elem.get( 'name', None )
params_dict = dict( install_dir=install_dir )
- build_commands = []
+ actions = []
for param_elem in elem:
param_name = param_elem.get( 'name' )
if param_name:
- if param_name == 'build_commands':
- for build_command_elem in param_elem:
- build_command_dict = {}
- build_command_name = build_command_elem.get( 'name' )
- if build_command_name:
- if build_command_name in MOVE_BUILD_COMMAND_NAMES:
- build_command_key = build_command_name
- for move_elem in build_command_elem:
- move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
- if move_elem_text:
- build_command_dict[ move_elem.tag ] = move_elem_text
- elif build_command_elem.text:
- build_command_key = '%sv^v^v%s' % ( build_command_name, build_command_elem.text )
- else:
+ if param_name == 'actions':
+ for action_elem in param_elem:
+ action_dict = {}
+ action_type = action_elem.get( 'type', 'shell_command' )
+ if action_type == 'shell_command':
+ # Example: <action type="shell_command">make</action>
+ action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if not action_key:
continue
+ elif action_type in MOVE_ACTIONS:
+ # Examples:
+ # <action type="move_file">
+ # <source>misc/some_file</source>
+ # <destination>$INSTALL_DIR/bin</destination>
+ # </action>
+ # <action type="move_directory_files">
+ # <source_directory>bin</source_directory>
+ # <destination_directory>$INSTALL_DIR/bin</destination_directory>
+ # </action>
+ action_key = action_type
+ for move_elem in action_elem:
+ move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if move_elem_text:
+ action_dict[ move_elem.tag ] = move_elem_text
+ elif action_elem.text:
+ # Example: <action type="change_directory">bin</action>
+ action_key = '%sv^v^v%s' % ( action_type, action_elem.text )
else:
- build_command_key = build_command_elem.text.replace( '$INSTALL_DIR', install_dir )
- if not build_command_key:
- continue
- build_commands.append( ( build_command_key, build_command_dict ) )
- if build_commands:
- params_dict[ 'build_commands' ] = build_commands
+ continue
+ actions.append( ( action_key, action_dict ) )
+ if actions:
+ params_dict[ 'actions' ] = actions
+ elif param_name == 'env_dependency_path':
+ env_dependency_path = param_elem.text.replace( '$INSTALL_DIR', install_dir )
else:
if param_elem.text:
- params_dict[ param_name ] = param_elem.text
+ params_dict[ param_name ] = param_elem.text.replace( '$INSTALL_DIR', install_dir )
if package_name:
params_dict[ 'package_name' ] = package_name
if fabfile_path:
@@ -151,7 +152,7 @@
except Exception, e:
return '%s. ' % str( e )
try:
- message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name )
+ message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
except:
@@ -164,17 +165,19 @@
"""
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
+ # Default value for env_dependency_path.
+ env_dependency_path = install_dir
method_name = elem.get( 'name', None )
params_str = ''
- build_commands = []
+ actions = []
for param_elem in elem:
param_name = param_elem.get( 'name' )
if param_name:
- if param_name == 'build_commands':
- for build_command_elem in param_elem:
- build_commands.append( build_command_elem.text.replace( '$INSTALL_DIR', install_dir ) )
- if build_commands:
- params_str += 'build_commands=%s,' % tool_shed_encode( encoding_sep.join( build_commands ) )
+ if param_name == 'actions':
+ for action_elem in param_elem:
+ actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
+ if actions:
+ params_str += 'actions=%s,' % tool_shed_encode( encoding_sep.join( actions ) )
else:
if param_elem.text:
param_value = tool_shed_encode( param_elem.text )
@@ -190,7 +193,7 @@
return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
if returncode:
return message
- message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name )
+ message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
else:
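
For readers following along: the refined tool_dependencies.xml definition replaces the old <build_commands> tag set with <actions>, parsed into ( action_key, action_dict ) tuples as shown in run_fabric_method above. Below is a minimal standalone sketch of that parsing logic; the helper name parse_actions and the sample XML are illustrative, not part of the commit.

from xml.etree import ElementTree

MOVE_ACTIONS = [ 'move_file', 'move_directory_files' ]  # assumed to mirror MOVE_ACTIONS in install_util.py

def parse_actions( actions_elem, install_dir ):
    # Hypothetical helper mirroring the <actions> loop in run_fabric_method.
    actions = []
    for action_elem in actions_elem:
        action_dict = {}
        action_type = action_elem.get( 'type', 'shell_command' )
        if action_type == 'shell_command':
            # <action type="shell_command">make</action>
            action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
        elif action_type in MOVE_ACTIONS:
            # <action type="move_file"><source>...</source><destination>...</destination></action>
            action_key = action_type
            for move_elem in action_elem:
                action_dict[ move_elem.tag ] = move_elem.text.replace( '$INSTALL_DIR', install_dir )
        else:
            # <action type="change_directory">bin</action>
            action_key = '%sv^v^v%s' % ( action_type, action_elem.text )
        actions.append( ( action_key, action_dict ) )
    return actions

sample = ElementTree.fromstring( '''<actions>
    <action type="shell_command">make</action>
    <action type="move_directory_files">
        <source_directory>bin</source_directory>
        <destination_directory>$INSTALL_DIR/bin</destination_directory>
    </action>
</actions>''' )
print parse_actions( sample, '/tmp/deps' )
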
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -798,11 +798,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -44,18 +44,20 @@
def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
installed_dependency = None
if installed_tool_dependencies:
- for installed_dependency in installed_tool_dependencies:
- if not installed_dependency.uninstalled:
- if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
+ for installed_tool_dependency in installed_tool_dependencies:
+ if not installed_tool_dependency.uninstalled:
+ if installed_tool_dependency.name==name and installed_tool_dependency.version==version and installed_tool_dependency.type==type:
+ installed_dependency = installed_tool_dependency
break
for base_path in self.base_paths:
if installed_dependency:
tool_shed_repository = installed_dependency.tool_shed_repository
path = os.path.join( base_path,
- name, version,
+ name,
+ version,
tool_shed_repository.owner,
tool_shed_repository.name,
- installed_dependency.installed_changeset_revision )
+ tool_shed_repository.installed_changeset_revision )
else:
path = os.path.join( base_path, name, version )
script = os.path.join( path, 'env.sh' )
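
A sketch of the lookup _find_dep_versioned now performs: for a dependency installed along with a tool shed repository, the env.sh script lives under a per-repository path keyed by the repository's installed_changeset_revision; otherwise the plain <name>/<version> layout is used. The repository argument below stands in for an installed_dependency.tool_shed_repository object; the function name is hypothetical.

import os

def find_versioned_env_script( base_path, name, version, repository=None ):
    if repository:
        # Tool-shed-installed dependency:
        # <base>/<name>/<version>/<owner>/<repo_name>/<installed_changeset_revision>
        path = os.path.join( base_path, name, version,
                             repository.owner,
                             repository.name,
                             repository.installed_changeset_revision )
    else:
        # Manually installed dependency: <base>/<name>/<version>
        path = os.path.join( base_path, name, version )
    script = os.path.join( path, 'env.sh' )
    if os.path.exists( script ):
        return script
    return None
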
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -326,7 +326,11 @@
if not owner:
owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision )
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app,
+ tool_shed,
+ name,
+ owner,
+ installed_changeset_revision )
if tool_shed_repository:
tool_shed_repository.description = description
tool_shed_repository.changeset_revision = current_changeset_revision
@@ -425,10 +429,10 @@
for tool_dict in metadata_dict[ 'tools' ]:
requirements = tool_dict.get( 'requirements', [] )
for requirement_dict in requirements:
- requirement_name = requirement_dict.get( 'name', None )
- requirement_version = requirement_dict.get( 'version', None )
- requirement_type = requirement_dict.get( 'type', None )
- if requirement_name == tool_dependency_name and requirement_version == tool_dependency_version and requirement_type == tool_dependency_type:
+ req_name = requirement_dict.get( 'name', None )
+ req_version = requirement_dict.get( 'version', None )
+ req_type = requirement_dict.get( 'type', None )
+ if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
can_generate_dependency_metadata = True
break
if not can_generate_dependency_metadata:
@@ -1159,13 +1163,13 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, name=None, version=None, type='package' ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
Future enhancements to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies
will be installed in:
- ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision>
+ ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
status = 'ok'
message = ''
@@ -1175,8 +1179,8 @@
ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
- if elem.tag == 'package':
- error_message = install_package( app, elem, tool_shed_repository, installed_changeset_revision )
+ if elem.tag == type:
+ error_message = install_package( app, elem, tool_shed_repository, name=name, version=version )
if error_message:
message += ' %s' % error_message
if message:
@@ -1288,12 +1292,9 @@
tool_shed_repository,
current_changeset_revision,
work_dir )
- # Install dependencies for repository tools. The tool_dependency.installed_changeset_revision value will be the value of
- # tool_shed_repository.changeset_revision (this method's current_changeset_revision). This approach will allow for different
- # versions of the same tool_dependency to be installed for associated versions of tools included in the installed repository.
+ # Install dependencies for repository tools.
status, message = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=current_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from load_repository_contents while installing tool dependencies:'
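
The name/version filter that handle_tool_dependencies now threads into install_package reduces to a predicate like the following (should_install is a hypothetical name; the condition matches the one added to install_package above):

def should_install( package_name, package_version, name=None, version=None ):
    # No specific dependency requested: install everything defined in the config.
    if not name and not version:
        return True
    # A specific dependency was requested: install only the matching <package> element.
    return name == package_name and version == package_version

assert should_install( 'samtools', '0.1.18' )
assert should_install( 'samtools', '0.1.18', name='samtools', version='0.1.18' )
assert not should_install( 'samtools', '0.1.18', name='bwa', version='0.5.9' )
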
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -2,7 +2,7 @@
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
-from galaxy.tool_shed.tool_dependencies.install_util import get_tool_dependency_install_dir, not_installed
+from galaxy.tool_shed.tool_dependencies.install_util import get_tool_dependency_install_dir
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -196,6 +196,7 @@
repository = get_repository( trans, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repository_install_dir = os.path.abspath ( relative_install_dir )
+ errors = ''
if params.get( 'deactivate_or_uninstall_repository_button', False ):
if repository.includes_tools:
# Handle tool panel alterations.
@@ -218,32 +219,24 @@
removed = False
if removed:
repository.uninstalled = True
- # Remove all installed tool dependencies - this is required when uninstalling the repository.
- for tool_dependency in repository.tool_dependencies:
- dependency_install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir,
- tool_dependency.name,
- tool_dependency.version,
- repository.owner,
- repository.name,
- tool_dependency.installed_changeset_revision ) )
- try:
- shutil.rmtree( dependency_install_dir )
- log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
- removed = True
- except Exception, e:
- log.debug( "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) ) )
- removed = False
- if removed:
- tool_dependency.uninstalled = True
- trans.sa_session.add( tool_dependency )
+ # Remove all installed tool dependencies.
+ for tool_dependency in repository.installed_tool_dependencies:
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if error_message:
+ errors = '%s %s' % ( errors, error_message )
repository.deleted = True
trans.sa_session.add( repository )
trans.sa_session.flush()
if remove_from_disk_checked:
- message = 'The repository named <b>%s</b> has been uninstalled.' % repository.name
+ message = 'The repository named <b>%s</b> has been uninstalled. ' % repository.name
+ if errors:
+ message += 'Attempting to uninstall tool dependencies resulted in errors: %s' % errors
+ status = 'error'
+ else:
+ status = 'done'
else:
- message = 'The repository named <b>%s</b> has been deactivated.' % repository.name
- status = 'done'
+ message = 'The repository named <b>%s</b> has been deactivated. ' % repository.name
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
@@ -277,33 +270,6 @@
return get_repository_file_contents( file_path )
@web.expose
@web.require_admin
- def install_tool_dependencies( self, trans, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- tool_shed_url = kwd[ 'tool_shed_url' ]
- repo_info_dict = kwd[ 'repo_info_dict' ]
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
- # Decode the encoded repo_info_dict param value.
- dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
- # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
- new_repo_info_dict = {}
- for name, repo_info_tuple in dict_with_tool_dependencies.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
- new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
- repo_info_dict = tool_shed_encode( new_repo_info_dict )
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
- return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_shed_url=tool_shed_url,
- repo_info_dict=repo_info_dict,
- dict_with_tool_dependencies=dict_with_tool_dependencies,
- includes_tools=includes_tools,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
def install_missing_tool_dependencies( self, trans, **kwd ):
"""
Install dependencies for tools included in the repository that were not installed when the repository was installed or that are
@@ -327,18 +293,15 @@
install_dir=relative_install_dir )
status, message = handle_tool_dependencies( app=trans.app,
tool_shed_repository=repository,
- installed_changeset_revision=repository.installed_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
try:
shutil.rmtree( work_dir )
except:
pass
- tool_dependencies_missing = status == 'error'
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
repo_files_dir=repo_files_dir,
- tool_dependencies_missing=tool_dependencies_missing,
message=message,
status=status )
if reinstalling and kwd.get( 'install_missing_tool_dependencies_button', False ):
@@ -352,12 +315,11 @@
if not reinstalling:
# Filter the tool_dependencies dictionary to eliminate successfully installed dependencies.
filtered_tool_dependencies = {}
- for dependency_key, requirements_dict in tool_dependencies.items():
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- install_dir = get_tool_dependency_install_dir( trans.app, repository, repository.changeset_revision, name, version )
- if not_installed( install_dir ):
- filtered_tool_dependencies[ dependency_key ] = requirements_dict
+ for missing_dependency_tup in repository.missing_tool_dependencies:
+ name, version, type = missing_dependency_tup
+ dependency_key = '%s/%s' % ( name, version )
+ install_dir = get_tool_dependency_install_dir( trans.app, repository, name, version )
+ filtered_tool_dependencies[ dependency_key ] = dict( name=name, type=type, version=version )
tool_dependencies = filtered_tool_dependencies
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
@@ -557,6 +519,66 @@
status=status )
@web.expose
@web.require_admin
+ def install_tool_dependencies( self, trans, **kwd ):
+ """Install dependencies for tools included in the repository when the repository is being installed."""
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ tool_shed_url = kwd[ 'tool_shed_url' ]
+ repo_info_dict = kwd[ 'repo_info_dict' ]
+ includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+ # Decode the encoded repo_info_dict param value.
+ dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
+ # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
+ new_repo_info_dict = {}
+ for name, repo_info_tuple in dict_with_tool_dependencies.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
+ new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
+ repo_info_dict = tool_shed_encode( new_repo_info_dict )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
+ tool_shed_url=tool_shed_url,
+ repo_info_dict=repo_info_dict,
+ dict_with_tool_dependencies=dict_with_tool_dependencies,
+ includes_tools=includes_tools,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def install_tool_dependency( self, trans, name, version, type, repository_id, **kwd ):
+ """Install dependencies for tools included in the repository when the repository is being installed."""
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ # Get the tool_dependencies.xml file from the repository.
+ work_dir = make_tmp_directory()
+ tool_dependencies_config = get_config_from_repository( trans.app,
+ 'tool_dependencies.xml',
+ repository,
+ repository.changeset_revision,
+ work_dir,
+ install_dir=relative_install_dir )
+ status, message = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=repository,
+ tool_dependencies_config=tool_dependencies_config,
+ name=name,
+ version=version,
+ type=type )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
+ repository=repository,
+ description=repository.description,
+ repo_files_dir=repo_files_dir,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def manage_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -791,6 +813,24 @@
status=status )
@web.expose
@web.require_admin
+ def uninstall_tool_dependency( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, kwd[ 'repository_id' ] )
+ tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if uninstalled:
+ message = "The '%s' tool dependency has been uninstalled." % tool_dependency.name
+ else:
+ message = "Error attempting to uninstall the '%s' tool dependency: %s" % ( tool_dependency.name, error_message )
+ status = 'error'
+ return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
+ repository=repository,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def update_to_changeset_revision( self, trans, **kwd ):
"""Update a cloned repository to the latest revision possible."""
params = util.Params( kwd )
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -19,10 +19,13 @@
<div popupmenu="repository-${repository.id}-popup"><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
+ %if repository.missing_tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ %endif
</div></ul>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- a/templates/admin/tool_shed_repository/browse_tool_dependency.mako
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -17,12 +17,12 @@
<ul class="manage-table-actions"><li><a class="action-button" id="tool_dependency-${tool_dependency.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="tool_dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
- %if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this tool dependency</a></div></ul>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
@@ -3,6 +3,23 @@
<% import os %>
+<br/><br/>
+<ul class="manage-table-actions">
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.includes_tools:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
+ %endif
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
+ </div>
+</ul>
+
%if message:
${render_msg( message, status )}
%endif
@@ -33,10 +50,10 @@
<form name="install_missing_tool_dependencies" id="install_missing_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), tool_panel_section=tool_panel_section, new_tool_panel_section=new_tool_panel_section, reinstalling=reinstalling )}" method="post" ><div style="clear: both"></div><div class="form-row">
- <label>Install tool dependencies?</label>
+ <label>Install missing tool dependencies?</label>
${install_tool_dependencies_check_box.get_html()}
<div class="toolParamHelp" style="clear: both;">
- Un-check to skip installation of these tool dependencies.
+ Un-check to skip installation of these missing tool dependencies.
</div>
## Fake the no_changes_check_box value.
%if no_changes_checked:
@@ -48,7 +65,7 @@
<div style="clear: both"></div><div class="form-row"><table class="grid">
- <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
+ <tr><td colspan="4" bgcolor="#D8D8D8"><b>Missing tool dependencies</b></td></tr><tr><th>Name</th><th>Version</th>
@@ -68,6 +85,7 @@
repository.changeset_revision )
readme_text = requirements_dict.get( 'readme', None )
%>
+ %if not os.path.exists( install_dir ):
<tr><td>${name}</td><td>${version}</td>
@@ -78,6 +96,7 @@
<tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr><tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
+ %endif
%endfor
</table><div style="clear: both"></div>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/install_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies.mako
@@ -61,6 +61,7 @@
changeset_revision )
readme_text = requirements_dict.get( 'readme', None )
%>
+ %if not os.path.exists( install_dir ):
<tr><td>${name}</td><td>${version}</td>
@@ -71,6 +72,7 @@
<tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr><tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
+ %endif
%endfor
%endfor
</table>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -14,7 +14,10 @@
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ %if repository.missing_tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
@@ -85,14 +88,24 @@
<table class="grid"><tr><td><b>name</b></td>
+ <td><b>version</b></td><td><b>type</b></td>
- <td><b>version</b></td></tr>
- %for name, requirements_dict in missing_tool_dependencies.items():
+ %for index, missing_dependency_tup in enumerate( missing_tool_dependencies ):
+ <% name, version, type = missing_dependency_tup %><tr>
- <td>${requirements_dict[ 'name' ]}</td>
- <td>${requirements_dict[ 'type' ]}</td>
- <td>${requirements_dict[ 'version' ]}</td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="missing_dependency-${index}-popup">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ ${name}
+ </a>
+ </div>
+ <div popupmenu="missing_dependency-${index}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ </div>
+ </td>
+ <td>${version}</td>
+ <td>${type}</td></tr>
%endfor
</table>
@@ -112,18 +125,23 @@
<table class="grid"><tr><td><b>name</b></td>
+ <td><b>version</b></td><td><b>type</b></td>
- <td><b>version</b></td></tr>
%for installed_tool_dependency in installed_tool_dependencies:
<tr><td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${installed_tool_dependency.name}
- </a>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${installed_tool_dependency.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${installed_tool_dependency.name}
+ </a>
+ </div>
+ <div popupmenu="dependency-${installed_tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+ </div></td>
+ <td>${installed_tool_dependency.version}</td><td>${installed_tool_dependency.type}</td>
- <td>${installed_tool_dependency.version}</td></tr>
%endfor
</table>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -7,6 +7,7 @@
<ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="repository-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
%if repository.includes_tools:
@@ -15,7 +16,7 @@
%if repository.missing_tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
@@ -24,7 +25,7 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">${repository.name} repository's tool dependencies</div>
+ <div class="toolFormTitle">Repository '${repository.name}' tool dependencies</div><div class="toolFormBody"><div class="form-row"><table class="grid">
@@ -33,32 +34,46 @@
name = tool_dependency.name
version = tool_dependency.version
type = tool_dependency.type
- installed_changeset_revision = tool_dependency.installed_changeset_revision
uninstalled = tool_dependency.uninstalled
- install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository.owner,
- repository.name,
- installed_changeset_revision ) )
+ install_dir = tool_dependency.installation_directory( trans.app )
%>
- <tr><td bgcolor="#D8D8D8"><b>Name</b></td><td bgcolor="#D8D8D8">${name}</td></tr>
- <tr><th>Version</th><td>${version}</td></tr>
- <tr><th>Type</th><td>${type}</td></tr>
- <tr>
- <th>Install directory</th>
- <td>
+ <tr>
+ <td bgcolor="#D8D8D8">
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${tool_dependency.id}-popup">
%if uninstalled:
- This dependency is not currently installed
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ <b>Name</b>
+ </a>
+ <div popupmenu="dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ </div>
%else:
<a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${install_dir}
+ <b>Name</b></a>
+ <div popupmenu="dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+ </div>
%endif
- </td>
- </tr>
- <tr><th>Installed changeset revision</th><td>${installed_changeset_revision}</td></tr>
- <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
+ </div>
+ </td>
+ <td bgcolor="#D8D8D8">${name}</td>
+ </tr>
+ <tr><th>Version</th><td>${version}</td></tr>
+ <tr><th>Type</th><td>${type}</td></tr>
+ <tr>
+ <th>Install directory</th>
+ <td>
+ %if uninstalled:
+ This dependency is not currently installed
+ %else:
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${install_dir}
+ </a>
+ %endif
+ </td>
+ </tr>
+ <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
%endfor
</table><div style="clear: both"></div>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/view_tool_metadata.mako
--- a/templates/admin/tool_shed_repository/view_tool_metadata.mako
+++ b/templates/admin/tool_shed_repository/view_tool_metadata.mako
@@ -11,7 +11,7 @@
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: afgane: Fix API history purge; a few documentation corrections
by Bitbucket 18 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/87be4c8d7f7f/
changeset: 87be4c8d7f7f
user: afgane
date: 2012-06-18 06:53:02
summary: Fix API history purge; a few documentation corrections
affected #: 3 files
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -130,6 +130,7 @@
history.deleted = True
if purge and trans.app.config.allow_user_dataset_purge:
+ # First purge all the datasets
for hda in history.datasets:
if hda.purged:
continue
@@ -143,6 +144,9 @@
except:
pass
trans.sa_session.flush()
+ # Now mark the history as purged
+ history.purged = True
+ self.sa_session.add( history )
trans.sa_session.flush()
return 'OK'
@@ -150,8 +154,8 @@
@web.expose_api
def undelete( self, trans, id, **kwd ):
"""
- POST /api/histories/deleted/{encoded_quota_id}/undelete
- Undeletes a quota
+ POST /api/histories/deleted/{encoded_history_id}/undelete
+ Undeletes a history
"""
history_id = id
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
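
With the fix above, the history itself is marked purged after its datasets are purged. A rough sketch of exercising this through the API follows; the URL layout and purge payload are inferred from the handler, and the host, history id, and key are placeholders. Note that datasets are only purged when allow_user_dataset_purge is enabled in the Galaxy config.

import json, urllib2

url = 'http://127.0.0.1:8080/api/histories/<encoded_history_id>?key=<api_key>'
request = urllib2.Request( url,
                           data=json.dumps( dict( purge=True ) ),
                           headers={ 'Content-Type': 'application/json' } )
# urllib2 has no native DELETE support, so override the method.
request.get_method = lambda: 'DELETE'
print urllib2.urlopen( request ).read()  # 'OK' on success
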
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd lib/galaxy/web/api/users.py
--- a/lib/galaxy/web/api/users.py
+++ b/lib/galaxy/web/api/users.py
@@ -65,7 +65,7 @@
@web.expose_api
def create( self, trans, payload, **kwd ):
"""
- /api/users
+ POST /api/users
Creates a new Galaxy user.
"""
if not trans.app.config.allow_user_creation:
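
For completeness, creating a user through the corrected POST endpoint might look like the sketch below. The payload keys are an assumption based on typical user-creation fields, and the call only succeeds when allow_user_creation is enabled and the key belongs to an admin.

import json, urllib2

url = 'http://127.0.0.1:8080/api/users?key=<admin_api_key>'
payload = dict( username='newuser', email='newuser@example.org', password='<password>' )
request = urllib2.Request( url,
                           data=json.dumps( payload ),
                           headers={ 'Content-Type': 'application/json' } )
print urllib2.urlopen( request ).read()
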
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd scripts/api/library_upload_from_import_dir.py
--- a/scripts/api/library_upload_from_import_dir.py
+++ b/scripts/api/library_upload_from_import_dir.py
@@ -1,5 +1,8 @@
#!/usr/bin/env python
-
+"""
+Example usage:
+./library_upload_from_import_dir.py <key> http://127.0.0.1:8080/api/libraries/dda47097d9189f15/contents Fdda47097d9189f15 auto /Users/EnisAfgan/projects/pprojects/galaxy/lib_upload_dir ?
+"""
import os, sys
sys.path.insert( 0, os.path.dirname( __file__ ) )
from common import submit
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/54898b02d42a/
changeset: 54898b02d42a
user: jgoecks
date: 2012-06-15 23:43:54
summary: Visualization framework improvements.
affected #: 5 files
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r 54898b02d42afdacaef261a49bbef18dd563365d static/scripts/mvc/tools.js
--- a/static/scripts/mvc/tools.js
+++ b/static/scripts/mvc/tools.js
@@ -68,6 +68,16 @@
},
/**
+ * Set many input values at once.
+ */
+ set_input_values: function(inputs_dict) {
+ var self = this;
+ _.each(_.keys(inputs_dict), function(input_name) {
+ self.set_input_value(input_name, inputs_dict[input_name]);
+ });
+ },
+
+ /**
* Run tool; returns a Deferred that resolves to the tool's output(s).
*/
run: function() {
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r 54898b02d42afdacaef261a49bbef18dd563365d static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -176,56 +176,6 @@
var extend = require('class').extend,
slotting = require('slotting'),
painters = require('painters');
-
-
-// ---- Canvas management and extensions ----
-
-/**
- * Canvas manager is used to create canvases, for browsers, this deals with
- * backward comparibility using excanvas, as well as providing a pattern cache
- */
-var CanvasManager = function( document, default_font ) {
- this.document = document;
- this.default_font = default_font !== undefined ? default_font : "9px Monaco, Lucida Console, monospace";
-
- this.dummy_canvas = this.new_canvas();
- this.dummy_context = this.dummy_canvas.getContext('2d');
- this.dummy_context.font = this.default_font;
-
- this.char_width_px = this.dummy_context.measureText("A").width;
-
- this.patterns = {};
-
- // FIXME: move somewhere to make this more general
- this.load_pattern( 'right_strand', "/visualization/strand_right.png" );
- this.load_pattern( 'left_strand', "/visualization/strand_left.png" );
- this.load_pattern( 'right_strand_inv', "/visualization/strand_right_inv.png" );
- this.load_pattern( 'left_strand_inv', "/visualization/strand_left_inv.png" );
-}
-
-extend( CanvasManager.prototype, {
- load_pattern: function( key, path ) {
- var patterns = this.patterns,
- dummy_context = this.dummy_context,
- image = new Image();
- image.src = galaxy_paths.attributes.image_path + path;
- image.onload = function() {
- patterns[key] = dummy_context.createPattern( image, "repeat" );
- }
- },
- get_pattern: function( key ) {
- return this.patterns[key];
- },
- new_canvas: function() {
- var canvas = this.document.createElement("canvas");
- // If using excanvas in IE, we need to explicately attach the canvas
- // methods to the DOM element
- if (window.G_vmlCanvasManager) { G_vmlCanvasManager.initElement(canvas); }
- // Keep a reference back to the manager
- canvas.manager = this;
- return canvas;
- }
-});
// ---- Web UI specific utilities ----
@@ -2985,18 +2935,20 @@
this.data_url = ('data_url' in obj_dict ? obj_dict.data_url : default_data_url);
this.data_url_extra_params = {}
this.data_query_wait = ('data_query_wait' in obj_dict ? obj_dict.data_query_wait : DEFAULT_DATA_QUERY_WAIT);
- this.dataset_check_url = converted_datasets_state_url;
+ this.dataset_check_url = ('converted_datasets_state_url' in obj_dict ? obj_dict.converted_datasets_state_url : converted_datasets_state_url);
// A little ugly creating data manager right now due to transition to Backbone-based objects.
- var dataset = new Dataset({
- id: obj_dict.dataset_id,
- hda_ldda: obj_dict.hda_ldda
- });
+ var track = this,
+ dataset = new Dataset({
+ id: obj_dict.dataset_id,
+ hda_ldda: obj_dict.hda_ldda
+ });
this.data_manager = ('data_manager' in obj_dict ?
obj_dict.data_manager :
new GenomeDataManager({
dataset: dataset,
- data_url: default_data_url,
+ data_url: track.data_url,
+ dataset_state_url: track.dataset_check_url,
data_mode_compatible: this.data_and_mode_compatible,
can_subset: this.can_subset,
}));
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r 54898b02d42afdacaef261a49bbef18dd563365d static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -36,7 +36,7 @@
* Create new object from a template. A template can be either an object dictionary or an
* object itself.
*/
-var object_from_template = function(template, container) {
+var object_from_template = function(template, view, container) {
if ('copy' in template) {
// Template is an object.
return template.copy(container);
@@ -93,7 +93,7 @@
drawable_type,
drawable;
for (var i = 0; i < drawables_config.length; i++) {
- view.add_drawable( object_from_template( drawables_config[i], view ) );
+ view.add_drawable( object_from_template( drawables_config[i], view, view ) );
}
}
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r 54898b02d42afdacaef261a49bbef18dd563365d static/scripts/viz/visualization.js
--- a/static/scripts/viz/visualization.js
+++ b/static/scripts/viz/visualization.js
@@ -1,5 +1,7 @@
/**
- * Model, view, and controller objects for Galaxy tools and tool panel.
+ * Model, view, and controller objects for Galaxy visualization framework.
+ *
+ * Required libraries: Backbone, jQuery
*
* Models have no references to views, instead using events to indicate state
* changes; this is advantageous because multiple views can use the same object
@@ -45,14 +47,66 @@
}
});
+// TODO: move to Backbone
+
+/**
+ * Canvas manager is used to create canvases; for browsers this deals with
+ * backward compatibility using excanvas, as well as providing a pattern cache
+ */
+var CanvasManager = function(default_font) {
+ this.default_font = default_font !== undefined ? default_font : "9px Monaco, Lucida Console, monospace";
+
+ this.dummy_canvas = this.new_canvas();
+ this.dummy_context = this.dummy_canvas.getContext('2d');
+ this.dummy_context.font = this.default_font;
+
+ this.char_width_px = this.dummy_context.measureText("A").width;
+
+ this.patterns = {};
+
+ // FIXME: move somewhere to make this more general
+ this.load_pattern( 'right_strand', "/visualization/strand_right.png" );
+ this.load_pattern( 'left_strand', "/visualization/strand_left.png" );
+ this.load_pattern( 'right_strand_inv', "/visualization/strand_right_inv.png" );
+ this.load_pattern( 'left_strand_inv', "/visualization/strand_left_inv.png" );
+}
+
+_.extend( CanvasManager.prototype, {
+ load_pattern: function( key, path ) {
+ var patterns = this.patterns,
+ dummy_context = this.dummy_context,
+ image = new Image();
+ image.src = galaxy_paths.attributes.image_path + path;
+ image.onload = function() {
+ patterns[key] = dummy_context.createPattern( image, "repeat" );
+ }
+ },
+ get_pattern: function( key ) {
+ return this.patterns[key];
+ },
+ new_canvas: function() {
+ var canvas = $("<canvas/>")[0];
+ // If using excanvas in IE, we need to explicitly attach the canvas
+ // methods to the DOM element
+ if (window.G_vmlCanvasManager) { G_vmlCanvasManager.initElement(canvas); }
+ // Keep a reference back to the manager
+ canvas.manager = this;
+ return canvas;
+ }
+});
+
/**
* Generic cache that handles key/value pairs.
*/
var Cache = Backbone.Model.extend({
defaults: {
num_elements: 20,
- obj_cache: {},
- key_ary: []
+ obj_cache: null,
+ key_ary: null
+ },
+
+ initialize: function(options) {
+ this.clear();
},
get_elt: function(key) {
@@ -114,9 +168,36 @@
dataset: null,
filters_manager: null,
data_url: null,
+ dataset_state_url: null,
data_mode_compatible: function(entry, mode) { return true; },
can_subset: function(entry) { return false; }
}),
+
+ /**
+ * Returns deferred that resolves to true when dataset is ready (or false if dataset
+ * cannot be used).
+ */
+ data_is_ready: function() {
+ var dataset = this.get('dataset'),
+ ready_deferred = $.Deferred(),
+ ss_deferred = new ServerStateDeferred({
+ ajax_settings: {
+ url: this.get('dataset_state_url'),
+ data: {
+ dataset_id: dataset.id,
+ hda_ldda: dataset.get('hda_ldda')
+ },
+ dataType: "json"
+ },
+ interval: 5000,
+ success_fn: function(response) { return response !== "pending"; }
+ });
+
+ $.when(ss_deferred.go()).then(function(response) {
+ ready_deferred.resolve(response === "ok" || response === "data" );
+ });
+ return ready_deferred;
+ },
/**
* Load data from server; returns AJAX object so that use of Deferred is possible.
@@ -366,6 +447,10 @@
end: this.get('end')
});
},
+
+ length: function() {
+ return this.get('end') - this.get('start');
+ },
/** Returns region in canonical form chrom:start-end */
toString: function() {
@@ -438,6 +523,10 @@
}
});
+var GenomeRegionCollection = Backbone.Collection.extend({
+ model: GenomeRegion
+});
+
/**
* A genome browser bookmark.
*/
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r 54898b02d42afdacaef261a49bbef18dd563365d templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -53,7 +53,11 @@
var galaxy_paths = new GalaxyPaths({
root_path: '${h.url_for( "/" )}',
image_path: '${h.url_for( "/static/images" )}',
- tool_url: '${h.url_for( controller="/api/tools" )}'
+ tool_url: '${h.url_for( controller="/api/tools" )}',
+ data_url: '${h.url_for( controller="tracks", action="data" )}',
+ raw_data_url: '${h.url_for( controller="tracks", action="raw_data" )}',
+ converted_datasets_state_url: '${h.url_for( controller="tracks", action="converted_datasets_state" )}',
+ dataset_state_url: '${h.url_for( controller="tracks", action="dataset_state" )}'
});
</script></%def>
https://bitbucket.org/galaxy/galaxy-central/changeset/aaf5c82a5579/
changeset: aaf5c82a5579
user: jgoecks
date: 2012-06-15 23:45:22
summary: Merge
affected #: 5 files
diff -r ccfcca670dfa5303d09a178473b663c15e5dc80a -r aaf5c82a55794c97125696aecb27056b5087fdb7 static/scripts/mvc/tools.js
--- a/static/scripts/mvc/tools.js
+++ b/static/scripts/mvc/tools.js
@@ -68,6 +68,16 @@
},
/**
+ * Set many input values at once.
+ */
+ set_input_values: function(inputs_dict) {
+ var self = this;
+ _.each(_.keys(inputs_dict), function(input_name) {
+ self.set_input_value(input_name, inputs_dict[input_name]);
+ });
+ },
+
+ /**
* Run tool; returns a Deferred that resolves to the tool's output(s).
*/
run: function() {
diff -r ccfcca670dfa5303d09a178473b663c15e5dc80a -r aaf5c82a55794c97125696aecb27056b5087fdb7 static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -176,56 +176,6 @@
var extend = require('class').extend,
slotting = require('slotting'),
painters = require('painters');
-
-
-// ---- Canvas management and extensions ----
-
-/**
- * Canvas manager is used to create canvases, for browsers, this deals with
- * backward comparibility using excanvas, as well as providing a pattern cache
- */
-var CanvasManager = function( document, default_font ) {
- this.document = document;
- this.default_font = default_font !== undefined ? default_font : "9px Monaco, Lucida Console, monospace";
-
- this.dummy_canvas = this.new_canvas();
- this.dummy_context = this.dummy_canvas.getContext('2d');
- this.dummy_context.font = this.default_font;
-
- this.char_width_px = this.dummy_context.measureText("A").width;
-
- this.patterns = {};
-
- // FIXME: move somewhere to make this more general
- this.load_pattern( 'right_strand', "/visualization/strand_right.png" );
- this.load_pattern( 'left_strand', "/visualization/strand_left.png" );
- this.load_pattern( 'right_strand_inv', "/visualization/strand_right_inv.png" );
- this.load_pattern( 'left_strand_inv', "/visualization/strand_left_inv.png" );
-}
-
-extend( CanvasManager.prototype, {
- load_pattern: function( key, path ) {
- var patterns = this.patterns,
- dummy_context = this.dummy_context,
- image = new Image();
- image.src = galaxy_paths.attributes.image_path + path;
- image.onload = function() {
- patterns[key] = dummy_context.createPattern( image, "repeat" );
- }
- },
- get_pattern: function( key ) {
- return this.patterns[key];
- },
- new_canvas: function() {
- var canvas = this.document.createElement("canvas");
- // If using excanvas in IE, we need to explicately attach the canvas
- // methods to the DOM element
- if (window.G_vmlCanvasManager) { G_vmlCanvasManager.initElement(canvas); }
- // Keep a reference back to the manager
- canvas.manager = this;
- return canvas;
- }
-});
// ---- Web UI specific utilities ----
@@ -2985,18 +2935,20 @@
this.data_url = ('data_url' in obj_dict ? obj_dict.data_url : default_data_url);
this.data_url_extra_params = {}
this.data_query_wait = ('data_query_wait' in obj_dict ? obj_dict.data_query_wait : DEFAULT_DATA_QUERY_WAIT);
- this.dataset_check_url = converted_datasets_state_url;
+ this.dataset_check_url = ('converted_datasets_state_url' in obj_dict ? obj_dict.converted_datasets_state_url : converted_datasets_state_url);
// A little ugly creating data manager right now due to transition to Backbone-based objects.
- var dataset = new Dataset({
- id: obj_dict.dataset_id,
- hda_ldda: obj_dict.hda_ldda
- });
+ var track = this,
+ dataset = new Dataset({
+ id: obj_dict.dataset_id,
+ hda_ldda: obj_dict.hda_ldda
+ });
this.data_manager = ('data_manager' in obj_dict ?
obj_dict.data_manager :
new GenomeDataManager({
dataset: dataset,
- data_url: default_data_url,
+ data_url: track.data_url,
+ dataset_state_url: track.dataset_check_url,
data_mode_compatible: this.data_and_mode_compatible,
can_subset: this.can_subset,
}));
diff -r ccfcca670dfa5303d09a178473b663c15e5dc80a -r aaf5c82a55794c97125696aecb27056b5087fdb7 static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -36,7 +36,7 @@
* Create new object from a template. A template can be either an object dictionary or an
* object itself.
*/
-var object_from_template = function(template, container) {
+var object_from_template = function(template, view, container) {
if ('copy' in template) {
// Template is an object.
return template.copy(container);
@@ -93,7 +93,7 @@
drawable_type,
drawable;
for (var i = 0; i < drawables_config.length; i++) {
- view.add_drawable( object_from_template( drawables_config[i], view ) );
+ view.add_drawable( object_from_template( drawables_config[i], view, view ) );
}
}
diff -r ccfcca670dfa5303d09a178473b663c15e5dc80a -r aaf5c82a55794c97125696aecb27056b5087fdb7 static/scripts/viz/visualization.js
--- a/static/scripts/viz/visualization.js
+++ b/static/scripts/viz/visualization.js
@@ -1,5 +1,7 @@
/**
- * Model, view, and controller objects for Galaxy tools and tool panel.
+ * Model, view, and controller objects for Galaxy visualization framework.
+ *
+ * Required libraries: Backbone, jQuery
*
* Models have no references to views, instead using events to indicate state
* changes; this is advantageous because multiple views can use the same object
@@ -45,14 +47,66 @@
}
});
+// TODO: move to Backbone
+
+/**
+ * Canvas manager is used to create canvases; for browsers, this deals with
+ * backward compatibility using excanvas, as well as providing a pattern cache
+ */
+var CanvasManager = function(default_font) {
+ this.default_font = default_font !== undefined ? default_font : "9px Monaco, Lucida Console, monospace";
+
+ this.dummy_canvas = this.new_canvas();
+ this.dummy_context = this.dummy_canvas.getContext('2d');
+ this.dummy_context.font = this.default_font;
+
+ this.char_width_px = this.dummy_context.measureText("A").width;
+
+ this.patterns = {};
+
+ // FIXME: move somewhere to make this more general
+ this.load_pattern( 'right_strand', "/visualization/strand_right.png" );
+ this.load_pattern( 'left_strand', "/visualization/strand_left.png" );
+ this.load_pattern( 'right_strand_inv', "/visualization/strand_right_inv.png" );
+ this.load_pattern( 'left_strand_inv', "/visualization/strand_left_inv.png" );
+}
+
+_.extend( CanvasManager.prototype, {
+ load_pattern: function( key, path ) {
+ var patterns = this.patterns,
+ dummy_context = this.dummy_context,
+ image = new Image();
+ image.src = galaxy_paths.attributes.image_path + path;
+ image.onload = function() {
+ patterns[key] = dummy_context.createPattern( image, "repeat" );
+ }
+ },
+ get_pattern: function( key ) {
+ return this.patterns[key];
+ },
+ new_canvas: function() {
+ var canvas = $("<canvas/>")[0];
+ // If using excanvas in IE, we need to explicitly attach the canvas
+ // methods to the DOM element
+ if (window.G_vmlCanvasManager) { G_vmlCanvasManager.initElement(canvas); }
+ // Keep a reference back to the manager
+ canvas.manager = this;
+ return canvas;
+ }
+});
+
/**
* Generic cache that handles key/value pairs.
*/
var Cache = Backbone.Model.extend({
defaults: {
num_elements: 20,
- obj_cache: {},
- key_ary: []
+ obj_cache: null,
+ key_ary: null
+ },
+
+ initialize: function(options) {
+ this.clear();
},
get_elt: function(key) {
@@ -114,9 +168,36 @@
dataset: null,
filters_manager: null,
data_url: null,
+ dataset_state_url: null,
data_mode_compatible: function(entry, mode) { return true; },
can_subset: function(entry) { return false; }
}),
+
+ /**
+ * Returns deferred that resolves to true when dataset is ready (or false if dataset
+ * cannot be used).
+ */
+ data_is_ready: function() {
+ var dataset = this.get('dataset'),
+ ready_deferred = $.Deferred(),
+ ss_deferred = new ServerStateDeferred({
+ ajax_settings: {
+ url: this.get('dataset_state_url'),
+ data: {
+ dataset_id: dataset.id,
+ hda_ldda: dataset.get('hda_ldda')
+ },
+ dataType: "json"
+ },
+ interval: 5000,
+ success_fn: function(response) { return response !== "pending"; }
+ });
+
+ $.when(ss_deferred.go()).then(function(response) {
+ ready_deferred.resolve(response === "ok" || response === "data" );
+ });
+ return ready_deferred;
+ },
/**
* Load data from server; returns AJAX object so that use of Deferred is possible.
@@ -366,6 +447,10 @@
end: this.get('end')
});
},
+
+ length: function() {
+ return this.get('end') - this.get('start');
+ },
/** Returns region in canonical form chrom:start-end */
toString: function() {
@@ -438,6 +523,10 @@
}
});
+var GenomeRegionCollection = Backbone.Collection.extend({
+ model: GenomeRegion
+});
+
/**
* A genome browser bookmark.
*/
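
The data_is_ready method above wraps a polling loop: ServerStateDeferred re-issues the AJAX request every 5 seconds until the state endpoint stops answering "pending", and the outer Deferred then resolves to whether the dataset is usable ("ok" or "data"). A minimal Python sketch of the same contract, assuming a state endpoint that returns those same JSON strings (the URL and the requests library here are illustrative, not Galaxy's API):

    import time
    import requests

    def wait_for_dataset(state_url, dataset_id, hda_ldda, interval=5.0, timeout=300.0):
        # Poll the dataset-state endpoint until it stops reporting "pending",
        # mirroring ServerStateDeferred's interval-based retry loop.
        deadline = time.time() + timeout
        while time.time() < deadline:
            state = requests.get(state_url, params={
                'dataset_id': dataset_id,
                'hda_ldda': hda_ldda,
            }).json()
            if state != "pending":
                # Resolve to the same boolean the Deferred produces.
                return state in ("ok", "data")
            time.sleep(interval)
        return False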
diff -r ccfcca670dfa5303d09a178473b663c15e5dc80a -r aaf5c82a55794c97125696aecb27056b5087fdb7 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -53,7 +53,11 @@
var galaxy_paths = new GalaxyPaths({
root_path: '${h.url_for( "/" )}',
image_path: '${h.url_for( "/static/images" )}',
- tool_url: '${h.url_for( controller="/api/tools" )}'
+ tool_url: '${h.url_for( controller="/api/tools" )}',
+ data_url: '${h.url_for( controller="tracks", action="data" )}',
+ raw_data_url: '${h.url_for( controller="tracks", action="raw_data" )}',
+ converted_datasets_state_url: '${h.url_for( controller="tracks", action="converted_datasets_state" )}',
+ dataset_state_url: '${h.url_for( controller="tracks", action="dataset_state" )}'
});
</script></%def>
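
The template change follows one pattern: resolve every route the client needs with h.url_for on the server and publish them in a single GalaxyPaths object, so client scripts never hard-code URLs. A hypothetical server-side sketch of the same idea (url_for is passed in; the function and names are illustrative, not Galaxy code):

    import json

    def galaxy_paths_script(url_for):
        # Resolve client-needed routes server-side and emit one config
        # blob; scripts read paths from this object instead of
        # hard-coding them.
        paths = {
            'root_path': url_for('/'),
            'image_path': url_for('/static/images'),
            'data_url': url_for(controller='tracks', action='data'),
            'dataset_state_url': url_for(controller='tracks', action='dataset_state'),
        }
        return '<script>var galaxy_paths = new GalaxyPaths(%s);</script>' % json.dumps(paths)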
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Updates to tabular dataset display. Ship first data chunk with the page, and use metadata or datatype column_names as a table header row if available.
by Bitbucket 15 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ccfcca670dfa/
changeset: ccfcca670dfa
user: dannon
date: 2012-06-15 23:01:05
summary: Updates to tabular dataset display. Ship first data chunk with the page, and use metadata or datatype column_names as a table header row if available.
affected #: 4 files
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r ccfcca670dfa5303d09a178473b663c15e5dc80a lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -19,7 +19,7 @@
class Tabular( data.Text ):
"""Tab delimited data"""
- CHUNK_SIZE = 20000
+ CHUNK_SIZE = 50000
"""Add metadata elements"""
MetadataElement( name="comment_lines", default=0, desc="Number of comment lines", readonly=False, optional=True, no_value=0 )
@@ -246,26 +246,40 @@
raise Exception, "Can't create peek rows %s" % str( exc )
return "".join( out )
+ def get_chunk(self, trans, dataset, chunk):
+ ck_index = int(chunk)
+ f = open(dataset.file_name)
+ f.seek(ck_index * self.CHUNK_SIZE)
+ # If we aren't at the start of the file, seek to next newline. Do this better eventually.
+ if f.tell() != 0:
+ cursor = f.read(1)
+ while cursor and cursor != '\n':
+ cursor = f.read(1)
+ ck_data = f.read(self.CHUNK_SIZE)
+ cursor = f.read(1)
+ while cursor and ck_data[-1] != '\n':
+ ck_data += cursor
+ cursor = f.read(1)
+ return to_json_string({'ck_data': ck_data, 'ck_index': ck_index+1})
+
def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None):
#TODO Prevent failure when displaying extremely long > 50kb lines.
if to_ext:
return self._serve_raw(trans, dataset, to_ext)
if chunk:
- ck_index = int(chunk)
- f = open(dataset.file_name)
- f.seek(ck_index * self.CHUNK_SIZE)
- # If we aren't at the start of the file, seek to next newline. Do this better eventually.
- if f.tell() != 0:
- cursor = f.read(1)
- while cursor and cursor != '\n':
- cursor = f.read(1)
- ck_data = f.read(self.CHUNK_SIZE)
- cursor = f.read(1)
- while cursor and ck_data[-1] != '\n':
- ck_data += cursor
- cursor = f.read(1)
- return to_json_string({'ck_data': ck_data, 'ck_index': ck_index+1})
- return trans.fill_template( "/dataset/tabular_chunked.mako",dataset = dataset)
+ return self.get_chunk(trans, dataset, chunk)
+ else:
+ column_names = 'null'
+ if dataset.metadata.column_names:
+ column_names = dataset.metadata.column_names
+ elif hasattr(dataset.datatype, 'column_names'):
+ column_names = dataset.datatype.column_names
+ return trans.fill_template( "/dataset/tabular_chunked.mako",
+ dataset = dataset,
+ chunk = self.get_chunk(trans, dataset, 0),
+ column_number = dataset.metadata.columns,
+ column_names = column_names,
+ column_types = dataset.metadata.column_types)
def set_peek( self, dataset, line_count=None, is_multi_byte=False):
super(Tabular, self).set_peek( dataset, line_count=line_count, is_multi_byte=is_multi_byte)
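
The chunking scheme in get_chunk is worth spelling out: seek to chunk_index * CHUNK_SIZE, skip forward to the next newline unless at offset 0, read CHUNK_SIZE bytes, then extend to the next newline so every chunk contains only whole lines. A standalone sketch of that logic (simplified with readline; not the Galaxy method itself):

    CHUNK_SIZE = 50000

    def read_chunk(path, chunk_index, chunk_size=CHUNK_SIZE):
        # Return (data, next_index); chunks are newline-aligned so no
        # line is ever split across two requests.
        with open(path, 'rb') as f:
            f.seek(chunk_index * chunk_size)
            if f.tell() != 0:
                f.readline()          # discard the partial line the seek landed in
            data = f.read(chunk_size)
            if data and not data.endswith(b'\n'):
                data += f.readline()  # extend to the end of the current line
            return data.decode('utf-8', 'replace'), chunk_index + 1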
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r ccfcca670dfa5303d09a178473b663c15e5dc80a static/june_2007_style/base.less
--- a/static/june_2007_style/base.less
+++ b/static/june_2007_style/base.less
@@ -1713,6 +1713,7 @@
top:10px;
height:32px;
width:32px;
+ display:none;
background:url(largespinner.gif);
}
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r ccfcca670dfa5303d09a178473b663c15e5dc80a static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -724,7 +724,7 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
-#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;display:none;background:url(largespinner.gif);}
#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
#content_table td.stringalign{text-align:left;}
.toolMenuAndView .toolForm{float:left;background-color:white;margin:10px;}
diff -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 -r ccfcca670dfa5303d09a178473b663c15e5dc80a templates/dataset/tabular_chunked.mako
--- a/templates/dataset/tabular_chunked.mako
+++ b/templates/dataset/tabular_chunked.mako
@@ -6,18 +6,19 @@
<%def name="javascripts()">
${parent.javascripts()}
<script type="text/javascript">
+ var DATASET_URL = "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ))}";
+ var COLUMN_NUMBER = ${column_number};
+ var COLUMN_TYPES = ${column_types};
+ var COLUMN_NAMES = ${column_names};
- var DATASET_URL = "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ))}";
- var DATASET_COLS = ${dataset.metadata.columns};
- var DATASET_TYPES = ${dataset.metadata.column_types};
-
+ var chunk = ${chunk};
var current_chunk = 0;
function renderCell(cell_contents, index, colspan){
if (colspan !== undefined){
return $('<td>').attr('colspan', colspan).addClass('stringalign').text(cell_contents);
}
- else if (DATASET_TYPES[index] == 'str'){
+ else if (COLUMN_TYPES[index] == 'str'){
/* Left align all str columns, right align the rest */
return $('<td>').addClass('stringalign').text(cell_contents);;
}
@@ -30,19 +31,19 @@
/* Check length of cells to ensure this is a complete row. */
var cells = line.split('\t');
var row = $('<tr>');
- if (cells.length == DATASET_COLS){
+ if (cells.length == COLUMN_NUMBER){
$.each(cells, function(index, cell_contents){
row.append(renderCell(cell_contents, index));
});
}
- else if(cells.length > DATASET_COLS){
+ else if(cells.length > COLUMN_NUMBER){
/* SAM file or like format with optional metadata included */
- $.each(cells.slice(0, DATASET_COLS -1), function(index, cell_contents){
+ $.each(cells.slice(0, COLUMN_NUMBER -1), function(index, cell_contents){
row.append(renderCell(cell_contents, index));
});
- row.append(renderCell(cells.slice(DATASET_COLS -1).join('\t'), DATASET_COLS-1));
+ row.append(renderCell(cells.slice(COLUMN_NUMBER -1).join('\t'), COLUMN_NUMBER-1));
}
- else if(DATASET_COLS > 5 && cells.length == DATASET_COLS - 1 ){
+ else if(COLUMN_NUMBER > 5 && cells.length == COLUMN_NUMBER - 1 ){
/* SAM file or like format with optional metadata missing */
$.each(cells, function(index, cell_contents){
row.append(renderCell(cell_contents, index));
@@ -51,41 +52,44 @@
}
else{
/* Comment line, just return the one cell*/
- row.append(renderCell(line, 0, DATASET_COLS));
+ row.append(renderCell(line, 0, COLUMN_NUMBER));
}
return row;
}
- function fillTable(){
- if (current_chunk !== -1){
- var table = $('#content_table');
- $.getJSON(DATASET_URL, {chunk: current_chunk}, function (result) {
- if (result.ck_data !== ""){
- var lines = result.ck_data.split('\n');
- $.each(lines, function(index, line){
- table.append(renderRow(line));
- });
- current_chunk = result.ck_index;
- }
- else {
- current_chunk = -1;
- }
+ function renderChunk(chunk){
+ var table = $('#content_table');
+ if (chunk.ck_data == ""){
+ current_chunk = -1;
+ }
+ else if(chunk.ck_index === current_chunk + 1){
+ if (current_chunk === 0 && COLUMN_NAMES){
+ table.append('<tr><th>' + COLUMN_NAMES.join('</th><th>') + '</th></tr>');
+ }
+ var lines = chunk.ck_data.split('\n');
+ $.each(lines, function(index, line){
+ table.append(renderRow(line));
});
+ current_chunk = chunk.ck_index;
}
}
$(document).ready(function(){
- fillTable();
+ renderChunk(chunk);
$(window).scroll(function(){
if ($(window).scrollTop() == $(document).height() - $(window).height()){
- fillTable();
+ if (current_chunk !== -1){
+ $.getJSON(DATASET_URL,
+ {chunk: current_chunk},
+ function(result){renderChunk(result)});
+ }
}
});
- $('#loading_indicator').ajaxStart(function(){
- $(this).show();
- }).ajaxStop(function(){
- $(this).hide();
- });
+ $('#loading_indicator').ajaxStart(function(){
+ $(this).show();
+ }).ajaxStop(function(){
+ $(this).hide();
+ });
});
</script></%def>
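
The client half of the contract is small: request chunk=current_chunk, append the rows, and stop when ck_data comes back empty; the ck_index === current_chunk + 1 guard keeps a duplicate response from being rendered twice. A sketch of a consumer for that JSON contract (endpoint URL illustrative):

    import requests

    def iter_chunks(dataset_url):
        # Generate the ck_data payloads in order, stopping on the
        # empty chunk that marks end-of-file.
        current = 0
        while True:
            result = requests.get(dataset_url, params={'chunk': current}).json()
            if not result['ck_data']:
                return
            yield result['ck_data']
            current = result['ck_index']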
commit/galaxy-central: dannon: Missing file from previous commit (renaming /cloud to /cloudlaunch)
by Bitbucket 15 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0bd0c35351c4/
changeset: 0bd0c35351c4
user: dannon
date: 2012-06-15 20:55:18
summary: Missing file from previous commit (renaming /cloud to /cloudlaunch)
affected #: 1 file
diff -r 38d9e07b58aa99ded2604cdd1aab4ca0f8761aa6 -r 0bd0c35351c4eb9c7103b6d737543317abc54ac0 templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -113,9 +113,9 @@
%if app.config.get_bool( 'enable_cloud_launch', False ):
<%
menu_options = [
- [_('New Cloud Cluster'), h.url_for( controller='/cloud', action='index' ) ],
+ [_('New Cloud Cluster'), h.url_for( controller='/cloudlaunch', action='index' ) ],
]
- tab( "cloud", _("Cloud"), h.url_for( controller='/cloud', action='index'), menu_options=menu_options )
+ tab( "cloud", _("Cloud"), h.url_for( controller='/cloudlaunch', action='index'), menu_options=menu_options )
%>
%endif
commit/galaxy-central: dannon: Rename 'cloud' controller to 'cloudlaunch' to avoid name collisions -- Cloudman interface for cloud instances, and /cloud redirect on main.
by Bitbucket 15 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/38d9e07b58aa/
changeset: 38d9e07b58aa
user: dannon
date: 2012-06-15 20:51:12
summary: Rename 'cloud' controller to 'cloudlaunch' to avoid name collisions -- Cloudman interface for cloud instances, and /cloud redirect on main.
affected #: 3 files
diff -r 5f3e2ffbae695cd7ae56b2ce4e4c0ed1182e2051 -r 38d9e07b58aa99ded2604cdd1aab4ca0f8761aa6 lib/galaxy/web/controllers/cloud.py
--- a/lib/galaxy/web/controllers/cloud.py
+++ /dev/null
@@ -1,220 +0,0 @@
-"""
-Cloud Controller: handles all cloud interactions.
-
-Adapted from Brad Chapman and Enis Afgan's BioCloudCentral
-BioCloudCentral Source: https://github.com/chapmanb/biocloudcentral
-
-"""
-
-import datetime
-import logging
-import time
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require('boto')
-import boto
-from galaxy import web
-from galaxy.web.base.controller import BaseUIController
-from boto.ec2.regioninfo import RegionInfo
-from boto.exception import EC2ResponseError
-
-log = logging.getLogger(__name__)
-
-class CloudController(BaseUIController):
- def __init__(self, app):
- BaseUIController.__init__(self, app)
-
- @web.expose
- def index(self, trans):
- return trans.fill_template("cloud/index.mako")
-
- @web.expose
- def launch_instance(self, trans, cluster_name, password, key_id, secret, instance_type):
- ec2_error = None
- try:
- # Create security group & key pair used when starting an instance
- ec2_conn = connect_ec2(key_id, secret)
- sg_name = create_cm_security_group(ec2_conn)
- kp_name, kp_material = create_key_pair(ec2_conn)
- except EC2ResponseError, err:
- ec2_error = err.error_message
- if ec2_error:
- return trans.fill_template("cloud/run.mako", error = ec2_error)
- else:
- user_provided_data={'cluster_name':cluster_name,
- 'access_key':key_id,
- 'secret_key':secret,
- 'instance_type':instance_type}
- if password:
- user_provided_data['password'] = password
- rs = run_instance(ec2_conn=ec2_conn,
- user_provided_data=user_provided_data,
- key_name=kp_name,
- security_groups=[sg_name])
- if rs:
- instance = rs.instances[0]
- ct = 0
- while not instance.public_dns_name:
- # Can take a second to have public dns name registered.
- # DBTODO, push this into a page update, this is not ideal.
- instance.update()
- ct +=1
- time.sleep(1)
- return trans.fill_template("cloud/run.mako",
- instance = rs.instances[0],
- kp_name = kp_name,
- kp_material = kp_material)
- else:
- return trans.fill_template("cloud/run.mako",
- error = "Instance failure, but no specific error was detected. Please check your AWS Console.")
-
-# ## Cloud interaction methods
-def connect_ec2(a_key, s_key):
- """ Create and return an EC2 connection object.
- """
- # Use variables for forward looking flexibility
- # AWS connection values
- region_name = 'us-east-1'
- region_endpoint = 'ec2.amazonaws.com'
- is_secure = True
- ec2_port = None
- ec2_conn_path = '/'
- r = RegionInfo(name=region_name, endpoint=region_endpoint)
- ec2_conn = boto.connect_ec2(aws_access_key_id=a_key,
- aws_secret_access_key=s_key,
- api_version='2011-11-01', # needed for availability zone support
- is_secure=is_secure,
- region=r,
- port=ec2_port,
- path=ec2_conn_path)
- return ec2_conn
-
-def create_cm_security_group(ec2_conn, sg_name='CloudMan'):
- """ Create a security group with all authorizations required to run CloudMan.
- If the group already exists, check its rules and add the missing ones.
- Return the name of the created security group.
- """
- cmsg = None
- # Check if this security group already exists
- sgs = ec2_conn.get_all_security_groups()
- for sg in sgs:
- if sg.name == sg_name:
- cmsg = sg
- log.debug("Security group '%s' already exists; will add authorizations next." % sg_name)
- break
- # If it does not exist, create security group
- if cmsg is None:
- log.debug("Creating Security Group %s" % sg_name)
- cmsg = ec2_conn.create_security_group(sg_name, 'A security group for CloudMan')
- # Add appropriate authorization rules
- # If these rules already exist, nothing will be changed in the SG
- ports = (('80', '80'), # Web UI
- ('20', '21'), # FTP
- ('22', '22'), # ssh
- ('30000', '30100'), # FTP transfer
- ('42284', '42284')) # CloudMan UI
- for port in ports:
- try:
- if not rule_exists(cmsg.rules, from_port=port[0], to_port=port[1]):
- cmsg.authorize(ip_protocol='tcp', from_port=port[0], to_port=port[1], cidr_ip='0.0.0.0/0')
- else:
- log.debug("Rule (%s:%s) already exists in the SG" % (port[0], port[1]))
- except EC2ResponseError, e:
- log.error("A problem with security group authorizations: %s" % e)
- # Add rule that allows communication between instances in the same SG
- g_rule_exists = False # Flag to indicate if group rule already exists
- for rule in cmsg.rules:
- for grant in rule.grants:
- if grant.name == cmsg.name:
- g_rule_exists = True
- log.debug("Group rule already exists in the SG")
- if g_rule_exists:
- break
- if g_rule_exists is False:
- try:
- cmsg.authorize(src_group=cmsg)
- except EC2ResponseError, e:
- log.error("A problem w/ security group authorization: %s" % e)
- log.info("Done configuring '%s' security group" % cmsg.name)
- return cmsg.name
-
-def rule_exists(rules, from_port, to_port, ip_protocol='tcp', cidr_ip='0.0.0.0/0'):
- """ A convenience method to check if an authorization rule in a security
- group exists.
- """
- for rule in rules:
- if rule.ip_protocol == ip_protocol and rule.from_port == from_port and \
- rule.to_port == to_port and cidr_ip in [ip.cidr_ip for ip in rule.grants]:
- return True
- return False
-
-def create_key_pair(ec2_conn, key_name='cloudman_key_pair'):
- """ Create a key pair with the provided name.
- Return the name of the key or None if there was an error creating the key.
- """
- kp = None
- # Check if a key pair under the given name already exists. If it does not,
- # create it, else return.
- kps = ec2_conn.get_all_key_pairs()
- for akp in kps:
- if akp.name == key_name:
- log.debug("Key pair '%s' already exists; not creating it again." % key_name)
- return akp.name, None
- try:
- kp = ec2_conn.create_key_pair(key_name)
- except EC2ResponseError, e:
- log.error("Problem creating key pair '%s': %s" % (key_name, e))
- return None, None
- return kp.name, kp.material
-
-def run_instance(ec2_conn, user_provided_data, image_id='ami-da58aab3',
- kernel_id=None, ramdisk_id=None, key_name='cloudman_key_pair',
- security_groups=['CloudMan']):
- """ Start an instance. If instance start was OK, return the ResultSet object
- else return None.
- """
- rs = None
- instance_type = user_provided_data['instance_type']
- # Remove 'instance_type' key from the dict before creating user data
- del user_provided_data['instance_type']
- placement = _find_placement(ec2_conn, instance_type)
- ud = "\n".join(['%s: %s' % (key, value) for key, value in user_provided_data.iteritems() if key != 'kp_material'])
- try:
- rs = ec2_conn.run_instances(image_id=image_id,
- instance_type=instance_type,
- key_name=key_name,
- security_groups=security_groups,
- user_data=ud,
- kernel_id=kernel_id,
- ramdisk_id=ramdisk_id,
- placement=placement)
- except EC2ResponseError, e:
- log.error("Problem starting an instance: %s\n%s" % (e, e.body))
- if rs:
- try:
- log.info("Started an instance with ID %s" % rs.instances[0].id)
- except Exception, e:
- log.error("Problem with the started instance object: %s" % e)
- else:
- log.warning("Problem starting an instance?")
- return rs
-
-def _find_placement(ec2_conn, instance_type):
- """Find a region zone that supports our requested instance type.
-
- We need to check spot prices in the potential availability zones
- for support before deciding on a region:
-
- http://blog.piefox.com/2011/07/ec2-availability-zones-and-instance.html
- """
- base = ec2_conn.region.name
- yesterday = datetime.datetime.now() - datetime.timedelta(1)
- for loc_choice in ["b", "a", "c", "d"]:
- cur_loc = "{base}{ext}".format(base=base, ext=loc_choice)
- if len(ec2_conn.get_spot_price_history(instance_type=instance_type,
- end_time=yesterday.isoformat(),
- availability_zone=cur_loc)) > 0:
- return cur_loc
- log.error("Did not find availabilty zone in {0} for {1}".format(base, instance_type))
- return None
-
diff -r 5f3e2ffbae695cd7ae56b2ce4e4c0ed1182e2051 -r 38d9e07b58aa99ded2604cdd1aab4ca0f8761aa6 lib/galaxy/web/controllers/cloudlaunch.py
--- /dev/null
+++ b/lib/galaxy/web/controllers/cloudlaunch.py
@@ -0,0 +1,220 @@
+"""
+Cloud Controller: handles all cloud interactions.
+
+Adapted from Brad Chapman and Enis Afgan's BioCloudCentral
+BioCloudCentral Source: https://github.com/chapmanb/biocloudcentral
+
+"""
+
+import datetime
+import logging
+import time
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require('boto')
+import boto
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from boto.ec2.regioninfo import RegionInfo
+from boto.exception import EC2ResponseError
+
+log = logging.getLogger(__name__)
+
+class CloudController(BaseUIController):
+ def __init__(self, app):
+ BaseUIController.__init__(self, app)
+
+ @web.expose
+ def index(self, trans):
+ return trans.fill_template("cloud/index.mako")
+
+ @web.expose
+ def launch_instance(self, trans, cluster_name, password, key_id, secret, instance_type):
+ ec2_error = None
+ try:
+ # Create security group & key pair used when starting an instance
+ ec2_conn = connect_ec2(key_id, secret)
+ sg_name = create_cm_security_group(ec2_conn)
+ kp_name, kp_material = create_key_pair(ec2_conn)
+ except EC2ResponseError, err:
+ ec2_error = err.error_message
+ if ec2_error:
+ return trans.fill_template("cloud/run.mako", error = ec2_error)
+ else:
+ user_provided_data={'cluster_name':cluster_name,
+ 'access_key':key_id,
+ 'secret_key':secret,
+ 'instance_type':instance_type}
+ if password:
+ user_provided_data['password'] = password
+ rs = run_instance(ec2_conn=ec2_conn,
+ user_provided_data=user_provided_data,
+ key_name=kp_name,
+ security_groups=[sg_name])
+ if rs:
+ instance = rs.instances[0]
+ ct = 0
+ while not instance.public_dns_name:
+ # Can take a second to have public dns name registered.
+ # DBTODO, push this into a page update, this is not ideal.
+ instance.update()
+ ct +=1
+ time.sleep(1)
+ return trans.fill_template("cloud/run.mako",
+ instance = rs.instances[0],
+ kp_name = kp_name,
+ kp_material = kp_material)
+ else:
+ return trans.fill_template("cloud/run.mako",
+ error = "Instance failure, but no specific error was detected. Please check your AWS Console.")
+
+# ## Cloud interaction methods
+def connect_ec2(a_key, s_key):
+ """ Create and return an EC2 connection object.
+ """
+ # Use variables for forward looking flexibility
+ # AWS connection values
+ region_name = 'us-east-1'
+ region_endpoint = 'ec2.amazonaws.com'
+ is_secure = True
+ ec2_port = None
+ ec2_conn_path = '/'
+ r = RegionInfo(name=region_name, endpoint=region_endpoint)
+ ec2_conn = boto.connect_ec2(aws_access_key_id=a_key,
+ aws_secret_access_key=s_key,
+ api_version='2011-11-01', # needed for availability zone support
+ is_secure=is_secure,
+ region=r,
+ port=ec2_port,
+ path=ec2_conn_path)
+ return ec2_conn
+
+def create_cm_security_group(ec2_conn, sg_name='CloudMan'):
+ """ Create a security group with all authorizations required to run CloudMan.
+ If the group already exists, check its rules and add the missing ones.
+ Return the name of the created security group.
+ """
+ cmsg = None
+ # Check if this security group already exists
+ sgs = ec2_conn.get_all_security_groups()
+ for sg in sgs:
+ if sg.name == sg_name:
+ cmsg = sg
+ log.debug("Security group '%s' already exists; will add authorizations next." % sg_name)
+ break
+ # If it does not exist, create security group
+ if cmsg is None:
+ log.debug("Creating Security Group %s" % sg_name)
+ cmsg = ec2_conn.create_security_group(sg_name, 'A security group for CloudMan')
+ # Add appropriate authorization rules
+ # If these rules already exist, nothing will be changed in the SG
+ ports = (('80', '80'), # Web UI
+ ('20', '21'), # FTP
+ ('22', '22'), # ssh
+ ('30000', '30100'), # FTP transfer
+ ('42284', '42284')) # CloudMan UI
+ for port in ports:
+ try:
+ if not rule_exists(cmsg.rules, from_port=port[0], to_port=port[1]):
+ cmsg.authorize(ip_protocol='tcp', from_port=port[0], to_port=port[1], cidr_ip='0.0.0.0/0')
+ else:
+ log.debug("Rule (%s:%s) already exists in the SG" % (port[0], port[1]))
+ except EC2ResponseError, e:
+ log.error("A problem with security group authorizations: %s" % e)
+ # Add rule that allows communication between instances in the same SG
+ g_rule_exists = False # Flag to indicate if group rule already exists
+ for rule in cmsg.rules:
+ for grant in rule.grants:
+ if grant.name == cmsg.name:
+ g_rule_exists = True
+ log.debug("Group rule already exists in the SG")
+ if g_rule_exists:
+ break
+ if g_rule_exists is False:
+ try:
+ cmsg.authorize(src_group=cmsg)
+ except EC2ResponseError, e:
+ log.error("A problem w/ security group authorization: %s" % e)
+ log.info("Done configuring '%s' security group" % cmsg.name)
+ return cmsg.name
+
+def rule_exists(rules, from_port, to_port, ip_protocol='tcp', cidr_ip='0.0.0.0/0'):
+ """ A convenience method to check if an authorization rule in a security
+ group exists.
+ """
+ for rule in rules:
+ if rule.ip_protocol == ip_protocol and rule.from_port == from_port and \
+ rule.to_port == to_port and cidr_ip in [ip.cidr_ip for ip in rule.grants]:
+ return True
+ return False
+
+def create_key_pair(ec2_conn, key_name='cloudman_key_pair'):
+ """ Create a key pair with the provided name.
+ Return the name of the key or None if there was an error creating the key.
+ """
+ kp = None
+ # Check if a key pair under the given name already exists. If it does not,
+ # create it, else return.
+ kps = ec2_conn.get_all_key_pairs()
+ for akp in kps:
+ if akp.name == key_name:
+ log.debug("Key pair '%s' already exists; not creating it again." % key_name)
+ return akp.name, None
+ try:
+ kp = ec2_conn.create_key_pair(key_name)
+ except EC2ResponseError, e:
+ log.error("Problem creating key pair '%s': %s" % (key_name, e))
+ return None, None
+ return kp.name, kp.material
+
+def run_instance(ec2_conn, user_provided_data, image_id='ami-da58aab3',
+ kernel_id=None, ramdisk_id=None, key_name='cloudman_key_pair',
+ security_groups=['CloudMan']):
+ """ Start an instance. If instance start was OK, return the ResultSet object
+ else return None.
+ """
+ rs = None
+ instance_type = user_provided_data['instance_type']
+ # Remove 'instance_type' key from the dict before creating user data
+ del user_provided_data['instance_type']
+ placement = _find_placement(ec2_conn, instance_type)
+ ud = "\n".join(['%s: %s' % (key, value) for key, value in user_provided_data.iteritems() if key != 'kp_material'])
+ try:
+ rs = ec2_conn.run_instances(image_id=image_id,
+ instance_type=instance_type,
+ key_name=key_name,
+ security_groups=security_groups,
+ user_data=ud,
+ kernel_id=kernel_id,
+ ramdisk_id=ramdisk_id,
+ placement=placement)
+ except EC2ResponseError, e:
+ log.error("Problem starting an instance: %s\n%s" % (e, e.body))
+ if rs:
+ try:
+ log.info("Started an instance with ID %s" % rs.instances[0].id)
+ except Exception, e:
+ log.error("Problem with the started instance object: %s" % e)
+ else:
+ log.warning("Problem starting an instance?")
+ return rs
+
+def _find_placement(ec2_conn, instance_type):
+ """Find a region zone that supports our requested instance type.
+
+ We need to check spot prices in the potential availability zones
+ for support before deciding on a region:
+
+ http://blog.piefox.com/2011/07/ec2-availability-zones-and-instance.html
+ """
+ base = ec2_conn.region.name
+ yesterday = datetime.datetime.now() - datetime.timedelta(1)
+ for loc_choice in ["b", "a", "c", "d"]:
+ cur_loc = "{base}{ext}".format(base=base, ext=loc_choice)
+ if len(ec2_conn.get_spot_price_history(instance_type=instance_type,
+ end_time=yesterday.isoformat(),
+ availability_zone=cur_loc)) > 0:
+ return cur_loc
+ log.error("Did not find availabilty zone in {0} for {1}".format(base, instance_type))
+ return None
+
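
Taken together, the module's helpers compose into the same flow launch_instance runs: connect, make sure the CloudMan security group and key pair exist (both creators are idempotent), then boot the instance with the cluster settings serialized as EC2 user data. A minimal driver sketch, assuming valid AWS credentials and the functions defined above:

    def launch_cluster(key_id, secret, cluster_name, instance_type):
        # connect -> ensure security group -> ensure key pair -> run
        # instance: the sequence the controller's launch_instance performs.
        ec2_conn = connect_ec2(key_id, secret)
        sg_name = create_cm_security_group(ec2_conn)
        kp_name, kp_material = create_key_pair(ec2_conn)
        return run_instance(ec2_conn=ec2_conn,
                            user_provided_data={'cluster_name': cluster_name,
                                                'access_key': key_id,
                                                'secret_key': secret,
                                                'instance_type': instance_type},
                            key_name=kp_name,
                            security_groups=[sg_name])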
diff -r 5f3e2ffbae695cd7ae56b2ce4e4c0ed1182e2051 -r 38d9e07b58aa99ded2604cdd1aab4ca0f8761aa6 templates/cloud/index.mako
--- a/templates/cloud/index.mako
+++ b/templates/cloud/index.mako
@@ -56,7 +56,7 @@
<div class="page-container" style="padding: 10px;"><h2>Launch a Galaxy Cloud Instance</h2><div class="toolForm">
- <form action="cloud/launch_instance" method="post">
+ <form action="${h.url_for( controller='cloudlaunch', action='launch_instance' )}" method="post"><div class="form-row"><label for="id_cluster_name">Cluster Name</label><input type="text" size="80" name="cluster_name" id="id_cluster_name"/><br/>
commit/galaxy-central: dannon: Allow null password in Cloud Launch form. Include error response in logging.
by Bitbucket 15 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5f3e2ffbae69/
changeset: 5f3e2ffbae69
user: dannon
date: 2012-06-15 20:41:52
summary: Allow null password in Cloud Launch form. Include error response in logging.
affected #: 1 file
diff -r 4a1389e037c60d83abdcc38ccb30314c4ef4a530 -r 5f3e2ffbae695cd7ae56b2ce4e4c0ed1182e2051 lib/galaxy/web/controllers/cloud.py
--- a/lib/galaxy/web/controllers/cloud.py
+++ b/lib/galaxy/web/controllers/cloud.py
@@ -41,12 +41,14 @@
if ec2_error:
return trans.fill_template("cloud/run.mako", error = ec2_error)
else:
+ user_provided_data={'cluster_name':cluster_name,
+ 'access_key':key_id,
+ 'secret_key':secret,
+ 'instance_type':instance_type}
+ if password:
+ user_provided_data['password'] = password
rs = run_instance(ec2_conn=ec2_conn,
- user_provided_data={'cluster_name':cluster_name,
- 'password':password,
- 'access_key':key_id,
- 'secret_key':secret,
- 'instance_type':instance_type},
+ user_provided_data=user_provided_data,
key_name=kp_name,
security_groups=[sg_name])
if rs:
@@ -187,7 +189,7 @@
ramdisk_id=ramdisk_id,
placement=placement)
except EC2ResponseError, e:
- log.error("Problem starting an instance: %s" % e)
+ log.error("Problem starting an instance: %s\n%s" % (e, e.body))
if rs:
try:
log.info("Started an instance with ID %s" % rs.instances[0].id)
commit/galaxy-central: greg: Fix a typo when creating or updating a tool shed repository db record.
by Bitbucket 15 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4a1389e037c6/
changeset: 4a1389e037c6
user: greg
date: 2012-06-15 16:21:15
summary: Fix a typo when creating or updating a tool shed repository db record.
affected #: 1 file
diff -r 622fd4f0a437c897fea83e3e60eba5191f8548f5 -r 4a1389e037c60d83abdcc38ccb30314c4ef4a530 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -340,7 +340,7 @@
name=name,
description=description,
owner=owner,
- installed_changeset_revision=changeset_revision,
+ installed_changeset_revision=installed_changeset_revision,
changeset_revision=current_changeset_revision,
ctx_rev=ctx_rev,
metadata=metadata_dict,
@@ -1492,6 +1492,22 @@
if uninstall:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
+def remove_tool_dependency( trans, tool_dependency ):
+ dependency_install_dir = tool_dependency.installation_directory( trans.app )
+ try:
+ shutil.rmtree( dependency_install_dir )
+ removed = True
+ error_message = ''
+ log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
+ except Exception, e:
+ removed = False
+ error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
+ log.debug( error_message )
+ if removed:
+ tool_dependency.uninstalled = True
+ trans.sa_session.add( tool_dependency )
+ trans.sa_session.flush()
+ return removed, error_message
def reset_tool_data_tables( app ):
# Reset the tool_data_tables to an empty dictionary.
app.tool_data_tables.data_tables = {}
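
Note the shape of remove_tool_dependency: it returns a (removed, error_message) pair and only marks the record uninstalled after the directory is actually gone, so callers branch on the flag instead of catching exceptions. A hypothetical caller (trans, tool_dependency, and log assumed available from the surrounding controller code):

    removed, error_message = remove_tool_dependency( trans, tool_dependency )
    if not removed:
        # The install directory survived, so the db record still says
        # installed; surface the error rather than silently continuing.
        log.error( error_message )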
commit/galaxy-central: jgoecks: Fix bug in packing track that is a library dataset
by Bitbucket 14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/622fd4f0a437/
changeset: 622fd4f0a437
user: jgoecks
date: 2012-06-14 22:57:09
summary: Fix bug in packing track that is a library dataset
affected #: 1 file
diff -r ea2fbfe6d9db5b9eac1830a7e9de1aea0dcf5806 -r 622fd4f0a437c897fea83e3e60eba5191f8548f5 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -348,7 +348,7 @@
def get_library_dataset( self, trans, id, check_ownership=False, check_accessible=True ):
return self.get_object( trans, id, 'LibraryDataset', check_ownership=False, check_accessible=check_accessible )
-class UsesVisualizationMixin( SharableItemSecurityMixin ):
+class UsesVisualizationMixin( SharableItemSecurityMixin, UsesLibraryMixinItems ):
""" Mixin for controllers that use Visualization objects. """
viz_types = [ "trackster", "circster" ]
@@ -468,6 +468,10 @@
def pack_track( track_dict ):
dataset_id = track_dict['dataset_id']
hda_ldda = track_dict.get('hda_ldda', 'hda')
+ if hda_ldda == 'ldda':
+ # HACK: need to encode library dataset ID because get_hda_or_ldda
+ # only works for encoded datasets.
+ dataset_id = trans.security.encode_id( dataset_id )
dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
try:
@@ -573,7 +577,7 @@
if hda_ldda == "hda":
return self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
- return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
+ return self.get_library_dataset_dataset_association( trans, dataset_id )
# -- Helper functions --