galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
June 2012
- 1 participant
- 98 discussions
commit/galaxy-central: Scott McManus: Added exit code/regex parsing PBS runner and fixed DRMAA/SGE runner
by Bitbucket 25 Jun '12
by Bitbucket 25 Jun '12
25 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdccf979fd8f/
changeset: fdccf979fd8f
user: Scott McManus
date: 2012-06-25 22:43:18
summary: Added exit code/regex parsing PBS runner and fixed DRMAA/SGE runner
affected #: 3681 files
diff -r 319b2b0e832c8ea5ef520c76722f7ad6270507c3 -r fdccf979fd8f2a7dcd3de0a8266af1c7c68f0542 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -291,8 +291,6 @@
the output datasets based on stderr and stdout from the command, and
the contents of the output files.
"""
- # TODO: Eliminate debugging code after testing all runners
- log.debug( "JobWrapper.finish: exit code:" + str(tool_exit_code) )
# default post job setup
self.sa_session.expunge_all()
job = self.get_job()
@@ -319,6 +317,7 @@
# that range, then apply the error level and add in a message.
# If we've reached a fatal error rule, then stop.
max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
+ tool_exit_code = int( tool_exit_code )
for stdio_exit_code in self.tool.stdio_exit_codes:
if ( tool_exit_code >= stdio_exit_code.range_start and
tool_exit_code <= stdio_exit_code.range_end ):
diff -r 319b2b0e832c8ea5ef520c76722f7ad6270507c3 -r fdccf979fd8f2a7dcd3de0a8266af1c7c68f0542 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -39,6 +39,11 @@
drmaa.JobState.FAILED: 'job finished, but failed',
}
+# The last four lines (following the last fi) will:
+# - setup the env
+# - move to the job wrapper's working directory
+# - execute the command
+# - take the command's exit code ($?) and write it to a file.
drm_template = """#!/bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
@@ -52,6 +57,7 @@
%s
cd %s
%s
+echo $? > %s
"""
def __lineno__():
"""Returns the current line number in our program."""
@@ -77,7 +83,7 @@
self.job_file = None
self.ofile = None
self.efile = None
- self.rcfile = None
+ self.ecfile = None
self.runner_url = None
class DRMAAJobRunner( BaseJobRunner ):
@@ -169,7 +175,7 @@
# define job attributes
ofile = "%s.drmout" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
efile = "%s.drmerr" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
- rcfile = "%s.drmrc" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
+ ecfile = "%s.drmec" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
job_name = "g%s_%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id, job_wrapper.user )
job_name = ''.join( map( lambda x: x if x in ( string.letters + string.digits + '_' ) else '_', job_name ) )
@@ -178,7 +184,7 @@
jt.jobName = job_name
jt.outputPath = ":%s" % ofile
jt.errorPath = ":%s" % efile
- jt.returnCodePath = ":%s" % rcfile
+ # Avoid a jt.exitCodePath for now - it's only used when finishing.
native_spec = self.get_native_spec( runner_url )
if native_spec is not None:
jt.nativeSpecification = native_spec
@@ -187,7 +193,8 @@
script = drm_template % ( job_wrapper.galaxy_lib_dir,
job_wrapper.get_env_setup_clause(),
os.path.abspath( job_wrapper.working_directory ),
- command_line )
+ command_line,
+ ecfile )
try:
fh = file( jt.remoteCommand, "w" )
@@ -231,7 +238,7 @@
drm_job_state.job_id = job_id
drm_job_state.ofile = ofile
drm_job_state.efile = efile
- drm_job_state.rcfile = rcfile
+ drm_job_state.ecfile = ecfile
drm_job_state.job_file = jt.remoteCommand
drm_job_state.old_state = 'new'
drm_job_state.running = False
@@ -316,17 +323,22 @@
"""
ofile = drm_job_state.ofile
efile = drm_job_state.efile
- rcfile = drm_job_state.rcfile
+ ecfile = drm_job_state.ecfile
job_file = drm_job_state.job_file
# collect the output
# wait for the files to appear
which_try = 0
+ # By default, the exit code is 0, which typically indicates success.
+ exit_code = 0
while which_try < (self.app.config.retry_job_output_collection + 1):
try:
ofh = file(ofile, "r")
efh = file(efile, "r")
+ ecfh = file(ecfile, "r")
stdout = ofh.read( 32768 )
stderr = efh.read( 32768 )
+ # The exit code should only be 8 bits, but read more anyway
+ exit_code_str = ecfh.read(32)
which_try = (self.app.config.retry_job_output_collection + 1)
except:
if which_try == self.app.config.retry_job_output_collection:
@@ -337,8 +349,15 @@
time.sleep(1)
which_try += 1
+ # Decode the exit code. If it's bogus, then just use 0.
try:
- drm_job_state.job_wrapper.finish( stdout, stderr )
+ exit_code = int(exit_code_str)
+ except:
+ log.warning( "Exit code " + exit_code_str + " invalid. Using 0." )
+ exit_code = 0
+
+ try:
+ drm_job_state.job_wrapper.finish( stdout, stderr, int(exit_code) )
except:
log.exception("Job wrapper finish method failed")
@@ -382,7 +401,7 @@
drm_job_state = DRMAAJobState()
drm_job_state.ofile = "%s.drmout" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.efile = "%s.drmerr" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
- drm_job_state.rcfile = "%s.drmrc" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
+ drm_job_state.ecfile = "%s.drmec" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.job_file = "%s/galaxy_%s.sh" % (self.app.config.cluster_files_directory, job.id)
drm_job_state.job_id = str( job.job_runner_external_id )
drm_job_state.runner_url = job_wrapper.get_job_runner()
diff -r 319b2b0e832c8ea5ef520c76722f7ad6270507c3 -r fdccf979fd8f2a7dcd3de0a8266af1c7c68f0542 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -532,16 +532,22 @@
stdout = ofh.read( 32768 )
stderr = efh.read( 32768 )
# This should be an 8-bit exit code, but read ahead anyway:
- exit_code = ecfh.read(32)
+ exit_code_str = ecfh.read(32)
except:
stdout = ''
stderr = 'Job output not returned by PBS: the output datasets were deleted while the job was running, the job was manually dequeued or there was a cluster error.'
# By default, the exit code is 0, which usually indicates success
# (although clearly some error happened).
+ exit_code_str = ""
+
+ # Translate the exit code string to an integer; use 0 on failure.
+ try:
+ exit_code = int( exit_code_str )
+ except:
+ log.warning( "Exit code " + exit_code_str + " was invalid. Using 0." )
exit_code = 0
- log.debug(stderr)
- log.debug( "Job exit code: " + exit_code )
+ # Call on the job wrapper to complete the call:
try:
pbs_job_state.job_wrapper.finish( stdout, stderr, exit_code )
except:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c04ab20c6ee2/
changeset: c04ab20c6ee2
user: dan
date: 2012-06-25 21:53:57
summary: Fix for getting mime type of converted datasets when not specified explicitly for external display applications.
affected #: 1 file
diff -r 20e01e610de056e2f2855df9a3840c315b17587f -r c04ab20c6ee25af93a8aedc529a6689e8d64e8b2 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -195,6 +195,8 @@
mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( self._url )[ -1 ], None )
if mime:
return mime
+ if hasattr( self.value, 'get_mime' ):
+ return self.value.get_mime()
return self.other_values[ DEFAULT_DATASET_NAME ].get_mime()
@property
def action_name( self ):
https://bitbucket.org/galaxy/galaxy-central/changeset/319b2b0e832c/
changeset: 319b2b0e832c
user: dan
date: 2012-06-25 21:54:27
summary: Add name parameter to IGV external display applications.
affected #: 2 files
diff -r c04ab20c6ee25af93a8aedc529a6689e8d64e8b2 -r 319b2b0e832c8ea5ef520c76722f7ad6270507c3 display_applications/igv/bam.xml
--- a/display_applications/igv/bam.xml
+++ b/display_applications/igv/bam.xml
@@ -81,9 +81,9 @@
</param><param type="template" name="redirect_url" strip="True" >
#if $site_id.startswith( 'local_' )
- ${site_link}?file=${bam_file.qp}&genome=${site_organism}&merge=true
+ ${site_link}?file=${bam_file.qp}&genome=${site_organism}&merge=true&name=${qp( $bam_file.name )}
#elif $site_id.startswith( 'web_link_' ):
- ${site_link}?sessionURL=${bam_file.qp}&genome=${site_organism}&merge=true
+ ${site_link}?sessionURL=${bam_file.qp}&genome=${site_organism}&merge=true&name=${qp( $bam_file.name )}
#else:
${jnlp.url}
#end if
diff -r c04ab20c6ee25af93a8aedc529a6689e8d64e8b2 -r 319b2b0e832c8ea5ef520c76722f7ad6270507c3 display_applications/igv/vcf.xml
--- a/display_applications/igv/vcf.xml
+++ b/display_applications/igv/vcf.xml
@@ -81,9 +81,9 @@
</param><param type="template" name="redirect_url" strip="True" >
#if $site_id.startswith( 'local_' )
- ${site_link}?file=${bgzip_file.qp}&genome=${site_organism}&merge=true
+ ${site_link}?file=${bgzip_file.qp}&genome=${site_organism}&merge=true&name=${qp( $bgzip_file.name )}
#elif $site_id.startswith( 'web_link_' ):
- ${site_link}?sessionURL=${bgzip_file.qp}&genome=${site_organism}&merge=true
+ ${site_link}?sessionURL=${bgzip_file.qp}&genome=${site_organism}&merge=true&name=${qp( $bgzip_file.name )}
#else:
${jnlp.url}
#end if
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Implement support for handling refined xml definition for installing tool dependencies along with installed tool shed repositories.
by Bitbucket 25 Jun '12
by Bitbucket 25 Jun '12
25 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/20e01e610de0/
changeset: 20e01e610de0
user: greg
date: 2012-06-25 21:46:35
summary: Implement support for handling refined xml definition for installing tool dependencies along with installed tool shed repositories.
affected #: 7 files
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -149,6 +149,11 @@
repository_clone_url,
metadata_dict,
dist_to_shed=True )
+ if 'tool_dependencies' in metadata_dict:
+ # All tool_dependency objects must be created before the tools are processed no matter whether tool dependencies are going to be installed.
+ tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, installed_changeset_revision )
+ else:
+ tool_dependencies = None
if 'tools' in metadata_dict:
work_dir = make_tmp_directory()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
@@ -165,7 +170,7 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
- if install_dependencies and 'tool_dependencies' in metadata_dict:
+ if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( self.app,
'tool_dependencies.xml',
@@ -173,12 +178,14 @@
installed_changeset_revision,
work_dir )
# Install tool dependencies.
- status, message = handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config )
- if status != 'done' and message:
- print 'The following error occurred from the InstallManager while installing tool dependencies:'
- print message
+ installed_tool_dependencies = handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ for installed_tool_dependency in installed_tool_dependencies:
+ if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
+ print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
+ print installed_tool_dependency.error_message, '\n\n'
add_to_tool_panel( self.app,
repository_name,
repository_clone_url,
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -35,6 +35,7 @@
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
def handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=None ):
+ # TODO: This method is deprecated and should be eliminated when the implementation for handling proprietary fabric scripts is implemented.
sa_session = app.model.context.current
cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
output = local( cmd, capture=True )
@@ -44,49 +45,84 @@
tool_dependency.error_message = str( output.stderr )
sa_session.add( tool_dependency )
sa_session.flush()
-def install_and_build_package( app, tool_dependency, params_dict ):
+def install_and_build_package( app, tool_dependency, actions_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
sa_session = app.model.context.current
- install_dir = params_dict[ 'install_dir' ]
- download_url = params_dict.get( 'download_url', None )
- clone_cmd = params_dict.get( 'clone_cmd', None )
- actions = params_dict.get( 'actions', None )
- package_name = params_dict.get( 'package_name', None )
- with make_tmp_dir() as work_dir:
- with lcd( work_dir ):
- if download_url:
- downloaded_filename = os.path.split( download_url )[ -1 ]
- downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, download_url )
- if common_util.istar( downloaded_file_path ):
- common_util.extract_tar( downloaded_file_path, work_dir )
- dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
- else:
- dir = work_dir
- elif clone_cmd:
- output = local( clone_cmd, capture=True )
- log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
- if output.return_code:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( output.stderr )
- sa_session.add( tool_dependency )
- sa_session.flush()
- return
- dir = package_name
- if actions:
- with lcd( dir ):
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- for action_tup in actions:
- action_key, action_dict = action_tup
- if action_key == 'move_directory_files':
+ install_dir = actions_dict[ 'install_dir' ]
+ package_name = actions_dict[ 'package_name' ]
+ #download_url = actions_dict.get( 'download_url', None )
+ #clone_cmd = actions_dict.get( 'clone_cmd', None )
+ actions = actions_dict.get( 'actions', None )
+ if actions:
+ with make_tmp_dir() as work_dir:
+ with lcd( work_dir ):
+ # The first action in the list of actions will be the one that defines the installation process. There
+ # are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
+ action_type, action_dict = actions[ 0 ]
+ if action_type == 'download_by_url':
+ # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
+ url = action_dict[ 'url' ]
+ downloaded_filename = os.path.split( url )[ -1 ]
+ downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, url )
+ if common_util.istar( downloaded_file_path ):
+ common_util.extract_tar( downloaded_file_path, work_dir )
+ dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
+ else:
+ dir = work_dir
+ elif action_type == 'shell_command':
+ # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
+ clone_cmd = action_dict[ 'command' ]
+ output = local( clone_cmd, capture=True )
+ log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
+ if output.return_code:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
+ dir = package_name
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ # The package has been down-loaded, so we can now perform all of the actions defined for building it.
+ with lcd( dir ):
+ for action_tup in actions[ 1: ]:
+ action_type, action_dict = action_tup
+ current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+ if action_type == 'move_directory_files':
common_util.move_directory_files( current_dir=current_dir,
source_dir=os.path.join( action_dict[ 'source_directory' ] ),
destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_key == 'move_file':
+ elif action_type == 'move_file':
common_util.move_file( current_dir=current_dir,
source=os.path.join( action_dict[ 'source' ] ),
destination_dir=os.path.join( action_dict[ 'destination' ] ) )
- else:
- action = action_key
+ elif action_type == 'set_environment':
+ # Currently the only action supported in this category is "environment_variable".
+ env_var_dict = action_dict[ 'environment_variable' ]
+ env_var_name = env_var_dict[ 'name' ]
+ env_var_action = env_var_dict[ 'action' ]
+ env_var_value = env_var_dict[ 'value' ]
+ if env_var_action == 'prepend_to':
+ changed_value = '%s:$%s' % ( env_var_value, env_var_name )
+ elif env_var_action == 'set_to':
+ changed_value = '%s' % env_var_value
+ elif env_var_action == 'append_to':
+ changed_value = '$%s:%s' % ( env_var_name, env_var_value )
+ cmd = "echo '%s=%s; export %s' > %s/env.sh;chmod +x %s/env.sh" % ( env_var_name,
+ changed_value,
+ env_var_name,
+ install_dir,
+ install_dir )
+ output = local( cmd, capture=True )
+ log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
+ if output.return_code:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
+ elif action_type == 'shell_command':
+ action = action_dict[ 'command' ]
with settings( warn_only=True ):
output = local( action, capture=True )
log_results( action, output, os.path.join( install_dir, INSTALLATION_LOG ) )
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -51,117 +51,109 @@
install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
if not os.path.exists( install_dir ):
for package_elem in elem:
- if package_elem.tag == 'proprietary_fabfile':
- # TODO: This is not yet working...
- # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
- if not fabric_version_checked:
- check_fabric_version()
- fabric_version_checked = True
- fabfile_name = package_elem.get( 'name', None )
- fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- print 'Installing tool dependencies via fabric script ', fabfile_path
- elif package_elem.tag == 'fabfile':
- # Handle tool dependency installation using a fabric method included in the Galaxy framework.
- fabfile_path = None
- for method_elem in package_elem:
+ if package_elem.tag == 'install':
+ # <install version="1.0">
+ package_install_version = package_elem.get( 'version', '1.0' )
tool_dependency = create_or_update_tool_dependency( app,
tool_shed_repository,
name=package_name,
version=package_version,
type='package',
status=app.model.ToolDependency.installation_status.INSTALLING )
- run_fabric_method( app, tool_dependency, method_elem, fabfile_path, install_dir, package_name=package_name )
- sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
- print package_name, 'version', package_version, 'installed in', install_dir
+ if package_install_version == '1.0':
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+ for actions_elem in package_elem:
+ install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=package_name )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ print package_name, 'version', package_version, 'installed in', install_dir
+ elif package_elem.tag == 'readme':
+ # Nothing to be done.
+ continue
+ #elif package_elem.tag == 'proprietary_fabfile':
+ # # TODO: This is not yet supported or functionally correct...
+ # # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
+ # if not fabric_version_checked:
+ # check_fabric_version()
+ # fabric_version_checked = True
+ # fabfile_name = package_elem.get( 'name', None )
+ # proprietary_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
+ # print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path
else:
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
return tool_dependency
-def run_fabric_method( app, tool_dependency, elem, fabfile_path, install_dir, package_name=None, **kwd ):
- """Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
+def install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=None, proprietary_fabfile_path=None, **kwd ):
+ """Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
sa_session = app.model.context.current
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
- # Default value for env_dependency_path.
- install_path, install_directory = os.path.split( install_dir )
- if install_directory != 'bin':
- env_dependency_path = os.path.join( install_dir, 'bin' )
+ actions_dict = dict( install_dir=install_dir )
+ if package_name:
+ actions_dict[ 'package_name' ] = package_name
+ actions = []
+ for action_elem in actions_elem:
+ action_dict = {}
+ action_type = action_elem.get( 'type', 'shell_command' )
+ if action_type == 'shell_command':
+ # <action type="shell_command">make</action>
+ action_elem_text = action_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if action_elem_text:
+ action_dict[ 'command' ] = action_elem_text
+ else:
+ continue
+ elif action_type == 'download_by_url':
+ # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
+ if action_elem.text:
+ action_dict[ 'url' ] = action_elem.text
+ else:
+ continue
+ elif action_type in [ 'move_directory_files', 'move_file' ]:
+ # <action type="move_file">
+ # <source>misc/some_file</source>
+ # <destination>$INSTALL_DIR/bin</destination>
+ # </action>
+ # <action type="move_directory_files">
+ # <source_directory>bin</source_directory>
+ # <destination_directory>$INSTALL_DIR/bin</destination_directory>
+ # </action>
+ for move_elem in action_elem:
+ move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if move_elem_text:
+ action_dict[ move_elem.tag ] = move_elem_text
+ elif action_type == 'set_environment':
+ # <action type="set_environment">
+ # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+ # </action>
+ for env_elem in action_elem:
+ if env_elem.tag == 'environment_variable':
+ env_var_name = env_elem.get( 'name', 'PATH' )
+ env_var_action = env_elem.get( 'action', 'prepend_to' )
+ env_var_text = env_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if env_var_text:
+ action_dict[ env_elem.tag ] = dict( name=env_var_name, action=env_var_action, value=env_var_text )
+ else:
+ continue
+ actions.append( ( action_type, action_dict ) )
+ if actions:
+ actions_dict[ 'actions' ] = actions
+ if proprietary_fabfile_path:
+ # TODO: this is not yet supported or functional, but when it is handle it using the fabric api.
+ # run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=package_name )
+ raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
else:
- env_dependency_path = install_dir
- method_name = elem.get( 'name', None )
- params_dict = dict( install_dir=install_dir )
- actions = []
- for param_elem in elem:
- param_name = param_elem.get( 'name' )
- if param_name:
- if param_name == 'actions':
- for action_elem in param_elem:
- action_dict = {}
- action_type = action_elem.get( 'type', 'shell_command' )
- if action_type == 'shell_command':
- # Example: <action type="shell_command">make</action>
- action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
- if not action_key:
- continue
- elif action_type in [ 'move_directory_files', 'move_file' ]:
- # Examples:
- # <action type="move_file">
- # <source>misc/some_file</source>
- # <destination>$INSTALL_DIR/bin</destination>
- # </action>
- # <action type="move_directory_files">
- # <source_directory>bin</source_directory>
- # <destination_directory>$INSTALL_DIR/bin</destination_directory>
- # </action>
- action_key = action_type
- for move_elem in action_elem:
- move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
- if move_elem_text:
- action_dict[ move_elem.tag ] = move_elem_text
- else:
- continue
- actions.append( ( action_key, action_dict ) )
- if actions:
- params_dict[ 'actions' ] = actions
- elif param_name == 'env_dependency_path':
- env_dependency_path = param_elem.text.replace( '$INSTALL_DIR', install_dir )
- else:
- if param_elem.text:
- params_dict[ param_name ] = param_elem.text.replace( '$INSTALL_DIR', install_dir )
- if package_name:
- params_dict[ 'package_name' ] = package_name
- if fabfile_path:
- # TODO: Handle this using the fabric api.
- # run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=package_name )
- return 'Tool dependency installation using proprietary fabric scripts is not yet supported. '
- else:
- # There is currently only 1 fabric method, install_and_build_package().
try:
- install_and_build_package( app, tool_dependency, params_dict )
+ # There is currently only one fabric method.
+ install_and_build_package( app, tool_dependency, actions_dict )
except Exception, e:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
tool_dependency.error_message = str( e )
sa_session.add( tool_dependency )
sa_session.flush()
- sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
- try:
- handle_post_build_processing( app,
- tool_dependency,
- install_dir,
- env_dependency_path,
- package_name=package_name )
- except Exception, e:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( e )
- sa_session.add( tool_dependency )
- sa_session.flush()
- sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
sa_session.add( tool_dependency )
sa_session.flush()
-def run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=None, **kwd ):
+def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
@@ -190,10 +182,10 @@
else:
params_str = params_str.rstrip( ',' )
try:
- cmd = 'fab -f %s %s:%s' % ( fabfile_path, method_name, params_str )
+ cmd = 'fab -f %s %s:%s' % ( proprietary_fabfile_path, method_name, params_str )
returncode, message = run_subprocess( app, cmd )
except Exception, e:
- return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
+ return "Exception executing fabric script %s: %s. " % ( str( proprietary_fabfile_path ), str( e ) )
if returncode:
return message
message = handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=package_name )
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -479,52 +479,16 @@
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
+ dependency_key = '%s/%s' % ( package_name, package_version )
requirements_dict [ 'name' ] = package_name
+ requirements_dict [ 'version' ] = package_version
requirements_dict [ 'type' ] = 'package'
- requirements_dict [ 'version' ] = package_version
- dependency_key = '%s/%s' % ( package_name, package_version )
- fabfiles_dict = {}
for sub_elem in elem:
- if sub_elem.tag == 'proprietary_fabfile':
- requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=True )
- elif sub_elem.tag == 'fabfile':
- requirements_dict = generate_fabfile_metadata( sub_elem, requirements_dict, proprietary=False )
- elif sub_elem.tag == 'readme':
+ if sub_elem.tag == 'readme':
requirements_dict[ 'readme' ] = sub_elem.text
if requirements_dict:
tool_dependencies_dict[ dependency_key ] = requirements_dict
return tool_dependencies_dict
-def generate_fabfile_metadata( elem, requirements_dict, proprietary=False ):
- """
- <proprietary_fabfile name="fabfile.py">
- <method name="install_and_build">
- <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</param>
- <param name="download_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/PHYLIP-3.6b.tar.gz</param>
- </method>
- </proprietary_fabfile>
- """
- fabfiles_dict = {}
- fabfile_name = elem.get( 'name', None )
- if fabfile_name:
- for method_elem in elem.findall( 'method' ):
- method_name = method_elem.get( 'name', None )
- if method_name:
- params_str = ''
- for param_elem in method_elem.findall( 'param' ):
- param_name = param_elem.get( 'name', None )
- param_value = param_elem.text
- if param_name and param_value:
- params_str += '%s=%s,' % ( param_name, param_value )
- fabfiles_dict[ 'fabfile' ] = fabfile_name
- fabfiles_dict[ 'method' ] = method_name
- fabfiles_dict[ 'params' ] = params_str.rstrip( ',' )
- if fabfiles_dict:
- if proprietary:
- key = 'proprietary_fabfiles'
- else:
- key = 'fabfiles'
- requirements_dict[ key ] = fabfiles_dict
- return requirements_dict
def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ):
"""Generate metadata using only the repository files on disk - files are not retrieved from the repository manifest."""
metadata_dict = {}
@@ -1194,7 +1158,7 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies=None ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
@@ -1202,8 +1166,7 @@
will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
- status = 'done'
- message = ''
+ installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
tree = ElementTree.parse( tool_dependencies_config )
root = tree.getroot()
@@ -1214,7 +1177,8 @@
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- can_install = True
+ # The value of tool_dependencies will be None only when this method is called by the InstallManager. In that case, tool
+ # dependency installation is not ajaxian, so the ToolDependency objects do not yet exist.
if tool_dependencies:
# Only install the package if it is not already installed.
can_install = False
@@ -1223,12 +1187,14 @@
can_install = tool_dependency.status in [ app.model.ToolDependency.installation_status.NEVER_INSTALLED,
app.model.ToolDependency.installation_status.UNINSTALLED ]
break
+ else:
+ can_install = False
if can_install:
tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
- if tool_dependency and tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
- message = tool_dependency.error_message
- status = 'error'
- return status, message
+ if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
+ app.model.ToolDependency.installation_status.ERROR ]:
+ installed_tool_dependencies.append( tool_dependency )
+ return installed_tool_dependencies
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revision up to the currently installed changeset revision),
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -113,6 +113,11 @@
use_paging = False
columns = [
NameColumn( "Name",
+ link=( lambda item: iff( item.status in \
+ [ model.ToolDependency.installation_status.NEVER_INSTALLED,
+ model.ToolDependency.installation_status.INSTALLING,
+ model.ToolDependency.installation_status.UNINSTALLED ], \
+ None, dict( action="manage_tool_dependencies", operation='browse', id=item.id ) ) ),
filterable="advanced" ),
VersionColumn( "Version",
filterable="advanced" ),
@@ -130,8 +135,17 @@
allow_multiple=True,
allow_popup=False,
condition=( lambda item: item.status in [ model.ToolDependency.installation_status.INSTALLED,
- model.ToolDependency.installation_status.ERROR ] ) )
+ model.ToolDependency.installation_status.ERROR ] ) )
]
+ def build_initial_query( self, trans, **kwd ):
+ tool_dependency_ids = kwd.get( 'tool_dependency_ids', None )
+ if tool_dependency_ids:
+ clause_list = []
+ for tool_dependency_id in tool_dependency_ids:
+ clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_dependency_id ) )
+ return trans.sa_session.query( self.model_class ) \
+ .filter( or_( *clause_list ) )
+ return trans.sa_session.query( self.model_class )
def apply_query_filter( self, trans, query, **kwd ):
tool_dependency_id = kwd.get( 'tool_dependency_id', None )
if not tool_dependency_id:
@@ -363,6 +377,7 @@
def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
"""Install specified dependencies for repository tools."""
# Get the tool_shed_repository from one of the tool_dependencies.
+ message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
work_dir = make_tmp_directory()
# Get the tool_dependencies.xml file from the repository.
@@ -371,17 +386,23 @@
tool_shed_repository,
tool_shed_repository.changeset_revision,
work_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
+ installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ for installed_tool_dependency in installed_tool_dependencies:
+ if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
+ message += ' %s' % installed_tool_dependency.error_message
try:
shutil.rmtree( work_dir )
except:
pass
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
- if not message:
- message = "Installed tool dependencies: %s" % ','.join( td.name for td in tool_dependencies )
+ if message:
+ status = 'error'
+ else:
+ message = "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_tool_dependencies',
tool_dependency_ids=tool_dependency_ids,
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -88,6 +88,7 @@
<td><b>name</b></td><td><b>version</b></td><td><b>type</b></td>
+ <td><b>status</b></td></tr>
%for tool_dependency in missing_tool_dependencies:
<tr>
@@ -98,6 +99,7 @@
</td><td>${tool_dependency.version}</td><td>${tool_dependency.type}</td>
+ <td>${tool_dependency.status}</td></tr>
%endfor
</table>
diff -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd -r 20e01e610de056e2f2855df9a3840c315b17587f templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -111,13 +111,14 @@
%for dependency_key, requirements_dict in tool_dependencies.items():
<%
name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version' ]
+
%><tr><td>${name}</td>
+ <td>${version}</td><td>${type}</td>
- <td>${version}</td></tr>
%endfor
</table>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Added Ensembl build parser. Improved genome downloader interface. Added post-download indexing feature.
by Bitbucket 25 Jun '12
by Bitbucket 25 Jun '12
25 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d2aba0918cf0/
changeset: d2aba0918cf0
user: inithello
date: 2012-06-25 15:39:41
summary: Added Ensembl build parser. Improved genome downloader interface. Added post-download indexing feature.
affected #: 17 files
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -50,7 +50,9 @@
tool-data/shared/igv/igv_build_sites.txt
tool-data/shared/rviewer/rviewer_build_sites.txt
tool-data/shared/ucsc/builds.txt
+tool-data/shared/ensembl/builds.txt
tool-data/*.loc
+tool-data/genome/*
# Test output
run_functional_tests.html
@@ -72,4 +74,5 @@
*.orig
.DS_Store
*.rej
-*~
\ No newline at end of file
+*~
+
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/get_ensembl.py
--- /dev/null
+++ b/cron/get_ensembl.py
@@ -0,0 +1,22 @@
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require("SQLAlchemy >= 0.4")
+pkg_resources.require("MySQL_python")
+from sqlalchemy import *
+
+
+engine = create_engine( 'mysql://anonymous@ensembldb.ensembl.org:5306', pool_recycle=3600 )
+conn = engine.connect()
+dbs = conn.execute( "SHOW DATABASES LIKE 'ensembl_website_%%'" )
+builds = {}
+lines = []
+for res in dbs:
+ dbname = res[0]
+ release = dbname.split('_')[-1]
+ genomes = conn.execute( "SELECT RS.assembly_code, S.name, S.common_name, %s FROM ensembl_website_%s.release_species RS LEFT JOIN ensembl_website_%s.species S on RS.species_id = S.species_id" % ( release, release, release ) )
+ for genome in genomes:
+ builds[genome[0]] = dict( release=genome[3], species='%s (%s/%s)' % ( genome[1], genome[2], genome[0] ) )
+for build in builds.items():
+ lines.append( '\t'.join( [ build[0], '%d' % build[1]['release'], build[1]['species'] ] ) )
+
+print '\n'.join( lines )
\ No newline at end of file
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/parse_publicbuilds.py
--- /dev/null
+++ b/cron/parse_publicbuilds.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+"""
+Connects to the URL specified and outputs builds available at that
+DSN in tabular format. UCSC Test gateway is used as default.
+build description
+"""
+
+import sys
+import urllib
+if sys.version_info[:2] >= ( 2, 5 ):
+ import xml.etree.ElementTree as ElementTree
+else:
+ from galaxy import eggs
+ import pkg_resources; pkg_resources.require( "elementtree" )
+ from elementtree import ElementTree
+
+URL = "http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
+
+def getbuilds(url):
+ try:
+ page = urllib.urlopen(URL)
+ except:
+ print "#Unable to open " + URL
+ print "?\tunspecified (?)"
+ sys.exit(1)
+
+ text = page.read()
+ try:
+ tree = ElementTree.fromstring(text)
+ except:
+ print "#Invalid xml passed back from " + URL
+ print "?\tunspecified (?)"
+ sys.exit(1)
+
+ print "#Harvested from http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
+ print "?\tunspecified (?)"
+ for dsn in tree:
+ build = dsn.find("SOURCE").attrib['id']
+ description = dsn.find("DESCRIPTION").text.replace(" - Genome at UCSC","").replace(" Genome at UCSC","")
+
+ fields = description.split(" ")
+ temp = fields[0]
+ for i in range(len(fields)-1):
+ if temp == fields[i+1]:
+ fields.pop(i+1)
+ else:
+ temp = fields[i+1]
+ description = " ".join(fields)
+ yield [build,description]
+
+if __name__ == "__main__":
+ if len(sys.argv) > 1:
+ URL = sys.argv[1]
+ for build in getbuilds(URL):
+ print build[0]+"\t"+build[1]+" ("+build[0]+")"
+
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/updateensembl.sh.sample
--- /dev/null
+++ b/cron/updateensembl.sh.sample
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# Script to update Ensembl shared data tables. It attempts the update, but if
+# the update fails, it does not replace the current data/tables with error
+# messages.
+
+# Edit this line to refer to galaxy's path:
+GALAXY=/path/to/galaxy
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
+
+# setup directories
+echo "Creating required directories."
+DIRS="
+${GALAXY}/tool-data/shared/ensembl
+${GALAXY}/tool-data/shared/ensembl/new
+"
+for dir in $DIRS; do
+ if [ ! -d $dir ]; then
+ echo "Creating $dir"
+ mkdir $dir
+ else
+ echo "$dir already exists, continuing."
+ fi
+done
+
+date
+echo "Updating Ensembl shared data tables."
+
+# Try to build "builds.txt"
+echo "Updating builds.txt"
+python ${GALAXY}/cron/get_ensembl.py > ${GALAXY}/tool-data/shared/ensembl/new/builds.txt
+if [ $? -eq 0 ]
+then
+ diff ${GALAXY}/tool-data/shared/ensembl/new/builds.txt ${GALAXY}/tool-data/shared/ensembl/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ensembl/new/builds.txt ${GALAXY}/tool-data/shared/ensembl/builds.txt
+ fi
+else
+ echo "Failed to update builds.txt" >&2
+fi
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample
+++ b/cron/updateucsc.sh.sample
@@ -28,6 +28,20 @@
date
echo "Updating UCSC shared data tables."
+# Try to build "publicbuilds.txt"
+echo "Updating publicbuilds.txt"
+python ${GALAXY}/cron/parse_publicbuilds.py > ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt
+if [ $? -eq 0 ]
+then
+ diff ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt ${GALAXY}/tool-data/shared/ucsc/publicbuilds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ucsc/new/publicbuilds.txt ${GALAXY}/tool-data/shared/ucsc/publicbuilds.txt
+ fi
+else
+ echo "Failed to update publicbuilds.txt" >&2
+fi
+
# Try to build "builds.txt"
echo "Updating builds.txt"
python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/genome_index.py
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/genome_index.py
@@ -0,0 +1,43 @@
+"""
+Module for managing genome transfer jobs.
+"""
+from __future__ import with_statement
+
+import logging, shutil, gzip, bz2, zipfile, tempfile, tarfile, sys, os
+
+from galaxy import eggs
+from sqlalchemy import and_
+from data_transfer import *
+
+log = logging.getLogger( __name__ )
+
+__all__ = [ 'GenomeIndexPlugin' ]
+
+class GenomeIndexPlugin( DataTransfer ):
+
+ def __init__( self, app ):
+ super( GenomeIndexPlugin, self ).__init__( app )
+ self.app = app
+ self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
+ self.sa_session = app.model.context.current
+
+ def create_job( self, trans, path, indexes, dbkey, intname ):
+ params = dict( user=trans.user.id, path=path, indexes=indexes, dbkey=dbkey, intname=intname )
+ deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'GenomeIndexPlugin', params = params )
+ self.sa_session.add( deferred )
+ self.sa_session.flush()
+ log.debug( 'Job created, id %d' % deferred.id )
+ return deferred.id
+
+ def check_job( self, job ):
+ log.debug( 'Job check' )
+ return 'ready'
+
+ def run_job( self, job ):
+ incoming = dict( path=os.path.abspath( job.params[ 'path' ] ), indexer=job.params[ 'indexes' ][0], user=job.params[ 'user' ] )
+ indexjob = self.tool.execute( self, set_output_hid=False, history=None, incoming=incoming, transfer=None, deferred=job )
+ job.params[ 'indexjob' ] = indexjob[0].id
+ job.state = self.app.model.DeferredJob.states.RUNNING
+ self.sa_session.add( job )
+ self.sa_session.flush()
+ return self.app.model.DeferredJob.states.RUNNING
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -78,10 +78,11 @@
def get_job_status( self, jobid ):
job = self.sa_session.query( self.app.model.DeferredJob ).get( int( jobid ) )
- if not hasattr( job, 'transfer_job' ):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
+ if 'transfer_job_id' in job.params:
+ if not hasattr( job, 'transfer_job' ):
+ job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+ else:
+ self.sa_session.refresh( job.transfer_job )
return job
def run_job( self, job ):
@@ -139,7 +140,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
os.close( fd )
compressed.close()
elif data_type == 'bzip':
@@ -154,7 +154,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
os.close( fd )
compressed.close()
elif data_type == 'zip':
@@ -177,7 +176,6 @@
if not chunk:
break
os.write( fd, chunk )
- os.write( fd, '\n' )
zipped_file.close()
else:
try:
@@ -223,8 +221,8 @@
else:
job.state = self.app.model.DeferredJob.states.OK
self.sa_session.add( job )
+ self.sa_session.flush()
return self.app.model.DeferredJob.states.OK
- self.sa_session.flush()
def _check_compress( self, filepath ):
retval = ''
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/jobs/deferred/liftover_transfer.py
--- a/lib/galaxy/jobs/deferred/liftover_transfer.py
+++ b/lib/galaxy/jobs/deferred/liftover_transfer.py
@@ -40,7 +40,7 @@
deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'LiftOverTransferPlugin', params = params )
self.sa_session.add( deferred )
self.sa_session.flush()
- return deferred.id
+ return job.id
def check_job( self, job ):
if job.params['type'] == 'init_transfer':
@@ -98,7 +98,9 @@
transfer = job.transfer_job
if params[ 'type' ] == 'extract_transfer':
CHUNK_SIZE = 2**20
- destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), job.params[ 'dbkey' ], 'liftOver' )
+ destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), source, 'liftOver' )
+ if not os.path.exists( destpath ):
+ os.makedirs( destpath )
destfile = job.params[ 'destfile' ]
destfilepath = os.path.join( destpath, destfile )
tmpprefix = '%s_%s_download_unzip_' % ( job.params['dbkey'], job.params[ 'transfer_job_id' ] )
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/actions/index_genome.py
--- a/lib/galaxy/tools/actions/index_genome.py
+++ b/lib/galaxy/tools/actions/index_genome.py
@@ -21,7 +21,9 @@
job.tool_id = tool.id
job.user_id = incoming['user']
start_job_state = job.state # should be job.states.NEW
- job.state = job.states.WAITING # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
+ job.state = job.states.WAITING # we need to set job state to something other than NEW,
+ # or else when tracking jobs in db it will be picked up
+ # before we have added input / output parameters
trans.sa_session.add( job )
# Create dataset that will serve as archive.
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -13,13 +13,12 @@
def load_genome_index_tools( toolbox ):
""" Adds tools for indexing genomes via the main job runner. """
- # Use same process as that used in load_external_metadata_tool; see that
- # method for why create tool description files on the fly.
+ # Create XML for loading the tool.
tool_xml_text = """
<tool id="__GENOME_INDEX__" name="Index Genome" version="0.1" tool_type="genome_index"><type class="GenomeIndexTool" module="galaxy.tools"/><action module="galaxy.tools.actions.index_genome" class="GenomeIndexToolAction"/>
- <command>$__GENOME_INDEX_COMMAND__ $output_file $output_file.files_path $__app__.config.rsync_url</command>
+ <command>$__GENOME_INDEX_COMMAND__ $output_file $output_file.files_path $__app__.config.rsync_url "$__app__.config.tool_data_path"</command><inputs><param name="__GENOME_INDEX_COMMAND__" type="hidden"/></inputs>
@@ -29,7 +28,7 @@
</tool>
"""
- # Load export tool.
+ # Load index tool.
tmp_name = tempfile.NamedTemporaryFile()
tmp_name.write( tool_xml_text )
tmp_name.flush()
@@ -166,6 +165,10 @@
self._check_link( fasta, target )
for line in location:
self._add_line( line[ 'file' ], line[ 'line' ] )
+ deferred.state = app.model.DeferredJob.states.OK
+ sa_session.add( deferred )
+ sa_session.flush()
+
def _check_link( self, targetfile, symlink ):
target = os.path.relpath( targetfile, os.path.dirname( symlink ) )
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -10,7 +10,8 @@
import optparse, sys, os, tempfile, time, subprocess, shlex, json, tarfile, shutil
class ManagedIndexer():
- def __init__( self, output_file, infile, workingdir, rsync_url ):
+ def __init__( self, output_file, infile, workingdir, rsync_url, tooldata ):
+ self.tooldatapath = os.path.abspath( tooldata )
self.workingdir = os.path.abspath( workingdir )
self.outfile = open( os.path.abspath( output_file ), 'w' )
self.basedir = os.path.split( self.workingdir )[0]
@@ -44,11 +45,12 @@
with WithChDir( self.workingdir ):
self._log( 'Running indexer %s.' % indexer )
result = getattr( self, self.indexers[ indexer ] )()
- if result is None:
- self._log( 'Error running indexer %s.' % indexer )
+ if result in [ None, False ]:
+ self._log( 'Error running indexer %s, %s' % ( indexer, result ) )
self._flush_files()
return True
else:
+ self._log( self.locations )
self._log( 'Indexer %s completed successfully.' % indexer )
self._flush_files()
@@ -93,6 +95,7 @@
os.remove( self.fafile )
return self._bwa_cs()
else:
+ self._log( 'BWA (base) exited with code %s' % result )
return False
def _bwa_cs( self ):
@@ -109,6 +112,7 @@
self.locations[ 'cs' ].append( self.fafile )
os.remove( self.fafile )
else:
+ self._log( 'BWA (color) exited with code %s' % result )
return False
else:
self.locations[ 'cs' ].append( self.fafile )
@@ -136,6 +140,7 @@
os.remove( self.fafile )
return self._bowtie_cs()
else:
+ self._log( 'Bowtie (base) exited with code %s' % result )
return False
def _bowtie_cs( self ):
@@ -149,6 +154,7 @@
if result == 0:
self.locations[ 'cs' ].append( self.genome )
else:
+ self._log( 'Bowtie (color) exited with code %s' % result )
return False
os.remove( os.path.join( indexdir, self.fafile ) )
else:
@@ -174,6 +180,7 @@
os.remove( self.fafile )
return True
else:
+ self._log( 'Bowtie2 exited with code %s' % result )
return False
def _twobit( self ):
@@ -193,6 +200,7 @@
os.remove( self.fafile )
return True
else:
+ self._log( 'faToTwoBit exited with code %s' % result )
return False
def _perm( self ):
@@ -208,12 +216,15 @@
command = shlex.split("PerM %s %s --readFormat fastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
result = subprocess.call( command )
if result != 0:
+ self._log( 'PerM (base) exited with code %s' % result )
return False
self.locations[ 'nt' ].append( [ key, desc, index ] )
os.remove( self.fafile )
return self._perm_cs()
def _perm_cs( self ):
+ genome = self.genome
+ read_length = 50
if not os.path.exists( 'cs' ):
os.makedirs( 'cs' )
with WithChDir( 'cs' ):
@@ -223,12 +234,13 @@
desc = '%s: seed=%s, read length=%s' % (genome, seed, read_length)
index = "%s_color_%s_%s.index" % (genome, seed, read_length)
if not os.path.exists( index ):
- command = shlex.split("PerM %s %s --readFormat csfastq --seed %s -m -s %s" % (local_ref, read_length, seed, index))
+ command = shlex.split("PerM %s %s --readFormat csfastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
if result != 0:
+ self._log( 'PerM (color) exited with code %s' % result )
return False
self.locations[ 'cs' ].append( [ key, desc, index ] )
- os.remove( local_ref )
+ os.remove( self.fafile )
temptar = tarfile.open( 'cs.tar', 'w' )
temptar.add( 'cs' )
temptar.close()
@@ -241,17 +253,19 @@
self.locations[ 'nt' ].append( self.fafile )
return True
local_ref = self.fafile
- srma = 'tool-data/shared/jars/srma.jar'
+ srma = os.path.abspath( os.path.join( self.tooldatapath, 'shared/jars/picard/CreateSequenceDictionary.jar' ) )
genome = os.path.splitext( self.fafile )[0]
self._check_link()
if not os.path.exists( '%s.fai' % self.fafile ) and not os.path.exists( '%s.fai' % self.genome ):
command = shlex.split( 'samtools faidx %s' % self.fafile )
subprocess.call( command, stderr=self.logfile )
- command = shlex.split( "java -cp %s net.sf.picard.sam.CreateSequenceDictionary R=%s O=%s/%s.dict URI=%s" \
- % ( srma, local_ref, os.curdir, genome, local_ref ) )
+ command = shlex.split( "java -jar %s R=%s O=%s.dict URI=%s" \
+ % ( srma, local_ref, genome, local_ref ) )
if not os.path.exists( '%s.dict' % self.genome ):
result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
+ self._log( ' '.join( command ) )
if result != 0:
+ self._log( 'Picard exited with code %s' % result )
return False
self.locations[ 'nt' ].append( self.fafile )
os.remove( self.fafile )
@@ -260,17 +274,20 @@
def _sam( self ):
local_ref = self.fafile
local_file = os.path.splitext( self.fafile )[ 0 ]
+ print 'Trying rsync'
result = self._do_rsync( '/sam_index/' )
if result == 0 and ( os.path.exists( '%s.fai' % self.fafile ) or os.path.exists( '%s.fai' % self.genome ) ):
- self.locations[ 'nt' ].append( local_ref )
+ self.locations[ 'nt' ].append( '%s.fai' % local_ref )
return True
self._check_link()
+ print 'Trying indexer'
command = shlex.split("samtools faidx %s" % local_ref)
- result = subprocess.call( command, stderr=self.logfile )
+ result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
if result != 0:
+ self._log( 'SAM exited with code %s' % result )
return False
else:
- self.locations[ 'nt' ].append( local_ref )
+ self.locations[ 'nt' ].append( '%s.fai' % local_ref )
os.remove( local_ref )
return True
@@ -288,9 +305,9 @@
# Parse command line.
parser = optparse.OptionParser()
(options, args) = parser.parse_args()
- indexer, infile, outfile, working_dir, rsync_url = args
+ indexer, infile, outfile, working_dir, rsync_url, tooldata = args
# Create archive.
- idxobj = ManagedIndexer( outfile, infile, working_dir, rsync_url )
+ idxobj = ManagedIndexer( outfile, infile, working_dir, rsync_url, tooldata )
idxobj.run_indexer( indexer )
\ No newline at end of file
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -407,6 +407,22 @@
db_names = DBNames( [( db_names.default_value, db_names.default_name )] )
return db_names
+def read_ensembl( filename, ucsc ):
+ """ Read Ensembl build names from file """
+ ucsc_builds = []
+ for build in ucsc:
+ ucsc_builds.append( build[0] )
+ ensembl_builds = list()
+ try:
+ for line in open( filename ):
+ if line[0:1] in [ '#', '\t' ]: continue
+ fields = line.replace("\r","").replace("\n","").split("\t")
+ if fields[0] in ucsc_builds: continue
+ ensembl_builds.append( dict( dbkey=fields[0], release=fields[1], name=fields[2].replace( '_', ' ' ) ) )
+ except Exception, e:
+ print "ERROR: Unable to read builds file:", e
+ return ensembl_builds
+
def read_build_sites( filename, check_builds=True ):
""" read db names to ucsc mappings from file, this file should probably be merged with the one above """
build_sites = []
@@ -634,11 +650,15 @@
s.quit()
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
+
# The dbnames list is used in edit attributes and the upload tool
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) )
+ucsc_names = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "publicbuilds.txt" ) )
+ensembl_names = read_ensembl( os.path.join( galaxy_root_path, "tool-data", "shared", "ensembl", "builds.txt" ), ucsc_names )
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) )
gbrowse_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "gbrowse", "gbrowse_build_sites.txt" ) )
genetrack_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "genetrack", "genetrack_sites.txt" ), check_builds=False )
+dlnames = dict(ucsc=ucsc_names, ensembl=ensembl_names)
def galaxy_directory():
return os.path.abspath(galaxy_root_path)
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -26,17 +26,69 @@
error='panel-error-message',
queued='state-color-waiting'
)
-
+
@web.expose
@web.require_admin
def manage_data( self, trans, **kwd ):
+ genomes = dict()
if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
return trans.fill_template( '/admin/data_admin/betajob.mako' )
- dbkeys = trans.db_builds
- return trans.fill_template( '/admin/data_admin/data_form.mako', dbkeys=dbkeys )
+ for line in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
+ indexers = dict( bowtie_indexes='Generate', bowtie2_indexes='Generate', bwa_indexes='Generate', perm_base_indexes='Generate', srma_indexes='Generate', sam_fa_indexes='Generate' )
+ dbkey = line[0]
+ name = line[2]
+ indexers[ 'name' ] = name
+ indexers[ 'fapath' ] = line[3]
+ genomes[ dbkey ] = indexers
+ for table in [ 'bowtie_indexes', 'bowtie2_indexes', 'bwa_indexes', 'srma_indexes' ]:
+ for line in trans.app.tool_data_tables.data_tables[ table ].data:
+ dbkey = line[0]
+ genomes[ dbkey ][ table ] = 'Generated'
+ for line in trans.app.tool_data_tables.data_tables[ 'sam_fa_indexes' ].data:
+ genomes[ line[1] ][ 'sam_fa_indexes' ] = 'Generated'
+ for line in trans.app.tool_data_tables.data_tables[ 'perm_base_indexes' ].data:
+ genomes[ line[1].split(':')[0] ][ 'perm_base_indexes' ] = 'Generated'
+ jobgrid = []
+ sa_session = trans.app.model.context.current
+ jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all()
+ prevjobid = 0
+ for job in jobs:
+ if prevjobid == job.deferred.id:
+ continue
+ prevjobid = job.deferred.id
+ state = job.deferred.state
+ params = job.deferred.params
+ if job.transfer is not None:
+ jobtype = 'download'
+ else:
+ jobtype = 'index'
+ indexers = ', '.join( params['indexes'] )
+ jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) )
+ return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, genomes=genomes )
+
+ @web.expose
+ @web.require_admin
+ def add_genome( self, trans, **kwd ):
+ if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
+ return trans.fill_template( '/admin/data_admin/betajob.mako' )
+ dbkeys = trans.ucsc_builds
+ ensemblkeys = trans.ensembl_builds
+ return trans.fill_template( '/admin/data_admin/data_form.mako', dbkeys=dbkeys, ensembls=ensemblkeys )
@web.expose
@web.require_admin
+ def index_build( self, trans, **kwd ):
+ """Index a previously downloaded genome."""
+ params = util.Params( kwd )
+ path = os.path.abspath( params.get( 'path', None ) )
+ indexes = [ params.get( 'indexes', None ) ]
+ dbkey = params.get( 'dbkey', None )
+ intname = params.get( 'longname', None )
+ indexjob = trans.app.job_manager.deferred_job_queue.plugins['GenomeIndexPlugin'].create_job( trans, path, indexes, dbkey, intname )
+ return indexjob
+
+ @web.expose
+ @web.require_admin
def download_build( self, trans, **kwd ):
"""Download a genome from a remote source and add it to the library."""
params = util.Params( kwd )
@@ -57,21 +109,21 @@
protocol = 'http'
if source == 'NCBI':
- dbkey = params.get('dbkey', '')[0]
+ dbkey = params.get('ncbi_dbkey', '')[0]
url = 'http://togows.dbcls.jp/entry/ncbi-nucleotide/%s.fasta' % dbkey
elif source == 'Broad':
- dbkey = params.get('dbkey', '')[0]
+ dbkey = params.get('broad_dbkey', '')[0]
url = 'ftp://ftp.broadinstitute.org/pub/seq/references/%s.fasta' % dbkey
elif source == 'UCSC':
longname = None
- for build in trans.db_builds:
- if dbkey[1] == build[0]:
+ for build in trans.ucsc_builds:
+ if dbkey == build[0]:
dbkey = build[0]
longname = build[1]
break
assert dbkey is not '?', 'That build was not found'
ftp = ftplib.FTP('hgdownload.cse.ucsc.edu')
- ftp.login('anonymous', 'user(a)example.com')
+ ftp.login('anonymous', trans.get_user().email)
checker = []
liftover = []
newlift = []
@@ -81,10 +133,12 @@
fname = chain.split( '/' )[-1]
target = fname.replace( '.over.chain.gz', '' ).split( 'To' )[1]
target = target[0].lower() + target[1:]
- newlift.append( [ chain, dbkey, target ] )
+ if not os.path.exists( os.path.join( trans.app.config.get( 'genome_data_path', 'tool-data/genome' ), dbkey, 'liftOver', fname ) ):
+ newlift.append( [ chain, dbkey, target ] )
current = dbkey[0].upper() + dbkey[1:]
targetfile = '%sTo%s.over.chain.gz' % ( target, current )
- newlift.append( [ '/goldenPath/%s/liftOver/%s' % ( target, targetfile ), target, dbkey ] )
+ if not os.path.exists( os.path.join( trans.app.config.get( 'genome_data_path', 'tool-data/genome' ), target, 'liftOver', targetfile ) ):
+ newlift.append( [ '/goldenPath/%s/liftOver/%s' % ( target, targetfile ), target, dbkey ] )
except:
newlift = None
pass
@@ -103,36 +157,35 @@
status = u'error'
return trans.fill_template( '/admin/data_admin/data_form.mako',
message=message,
- status=status )
+ status=status,
+ ensembls=trans.ensembl_builds,
+ dbkeys=trans.ucsc_builds )
elif source == 'Ensembl':
- section = params.get('ensembl_section', '')
- release1 = params.get('release_number', '')
- organism = params.get('organism', '')
- name = params.get('name', '')
- longname = organism
- dbkey = name
- release2 = params.get('release2', '')
- release2 = ".%s" % release2 if release2 else ""
- if section == 'standard':
- url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s%s.dna.toplevel.fa.gz' % \
- (release1, organism.lower(), organism, name, release2)
- else:
- url = 'ftp://ftp.ensemblgenomes.org/pub/%s/release-%s/fasta/%s/dna/%s.%s%s.dna.top…' % \
- (section, release1, organism.lower(), organism, name, release2)
- elif source == 'local':
- url = 'http://127.0.0.1/%s.tar.gz' % dbkey
+ dbkey = params.get( 'ensembl_dbkey', None )
+ assert dbkey is not '?', 'That build was not found'
+ for build in trans.ensembl_builds:
+ if build[ 'dbkey' ] == dbkey:
+ dbkey = build[ 'dbkey' ]
+ release = build[ 'release' ]
+ pathname = '_'.join( build[ 'name' ].split(' ')[0:2] )
+ longname = build[ 'name' ].replace('_', ' ')
+ break
+ url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
+ log.debug( build )
+ log.debug( url )
else:
- raise ValueError
+ raise ValueError, 'Somehow an invalid data source was specified.'
params = dict( protocol='http', name=dbkey, datatype='fasta', url=url, user=trans.user.id )
jobid = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].create_job( trans, url, dbkey, longname, indexers )
chainjob = []
if newlift is not None:
for chain in newlift:
- liftover_url = u'ftp://hgdownload.cse.ucsc.edu%s' % chain[0]
+ liftover_url = u'ftp://hgdownload.cse.ucsc.edu%s' % chain[0]
from_genome = chain[1]
to_genome = chain[2]
destfile = liftover_url.split('/')[-1].replace('.gz', '')
- chainjob.append( trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].create_job( trans, liftover_url, dbkey, from_genome, to_genome, destfile, jobid ) )
+ lochain = trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].create_job( trans, liftover_url, dbkey, from_genome, to_genome, destfile, jobid )
+ chainjob.append( lochain )
job = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].get_job_status( jobid )
job.params['liftover'] = chainjob
trans.app.model.context.current.add( job )
@@ -146,9 +199,13 @@
def monitor_status( self, trans, **kwd ):
params = util.Params( kwd )
jobid = params.get( 'job', '' )
+ gname = params.get( 'intname', '' )
+ deferred = trans.app.model.context.current.query( model.DeferredJob ).filter_by( id=jobid ).first()
+ gname = deferred.params[ 'intname' ]
+ indexers = ', '.join( deferred.params[ 'indexes' ] )
jobs = self._get_jobs( jobid, trans )
jsonjobs = json.dumps( jobs )
- return trans.fill_template( '/admin/data_admin/download_status.mako', mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
+ return trans.fill_template( '/admin/data_admin/download_status.mako', name=gname, indexers=indexers, mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
@web.expose
@web.require_admin
@@ -160,16 +217,6 @@
jobs = self._get_jobs( jobid, trans )
return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( jobs ) )
- @web.expose
- @web.require_admin
- def job_status( self, trans, **kwd ):
- params = util.Params( kwd )
- jobid = params.get( 'jobid', None )
- jobtype = params.get( 'jobtype', None )
- fillvals = None
- fillvals = self._get_job( jobid, jobtype, trans )
- return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( fillvals ) )
-
def _get_job( self, jobid, jobtype, trans ):
sa = trans.app.model.context.current
if jobtype == 'liftover':
@@ -191,12 +238,12 @@
job = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].get_job_status( jobid )
sa_session = trans.app.model.context.current
jobs.append( self._get_job( job.id, 'deferred', trans ) )
- jobs.append( self._get_job( job.transfer_job.id, 'transfer', trans ) )
- idxjobs = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred_job_id=job.id, transfer_job_id=job.transfer_job.id ).all()
- if job.params.has_key( 'liftover' ):
- for jobid in job.params[ 'liftover' ]:
- jobs.append( self._get_job( jobid, 'liftover', trans ) )
- for idxjob in idxjobs:
- #print idxjob
- jobs.append( self._get_job( idxjob.job_id, 'index', trans ) )
+ if hasattr( job, 'transfer_job' ): # This is a transfer job, check for indexers
+ jobs.append( self._get_job( job.transfer_job.id, 'transfer', trans ) )
+ idxjobs = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred_job_id=job.id, transfer_job_id=job.transfer_job.id ).all()
+ if job.params.has_key( 'liftover' ) and job.params[ 'liftover' ] is not None:
+ for jobid in job.params[ 'liftover' ]:
+ jobs.append( self._get_job( jobid, 'liftover', trans ) )
+ for idxjob in idxjobs:
+ jobs.append( self._get_job( idxjob.job_id, 'index', trans ) )
return jobs
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -799,6 +799,14 @@
dbnames.extend( util.dbnames )
return dbnames
+ @property
+ def ucsc_builds( self ):
+ return util.dlnames['ucsc']
+
+ @property
+ def ensembl_builds( self ):
+ return util.dlnames['ensembl']
+
def db_dataset_for( self, dbkey ):
"""
Returns the db_file dataset associated/needed by `dataset`, or `None`.
@@ -957,6 +965,14 @@
dbnames.append((key, "%s (%s) [Custom]" % (chrom_dict['name'], key) ))
dbnames.extend( util.dbnames )
return dbnames
+
+ @property
+ def ucsc_builds( self ):
+ return util.dlnames['ucsc']
+
+ @property
+ def ensembl_builds( self ):
+ return util.dlnames['ensembl']
class GalaxyWebUITransaction( GalaxyWebTransaction ):
def __init__( self, environ, app, webapp, session_cookie ):
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/data_form.mako
--- a/templates/admin/data_admin/data_form.mako
+++ b/templates/admin/data_admin/data_form.mako
@@ -62,7 +62,7 @@
<div class="form-row"><label for="indexers">Indexers</label><select name="indexers" multiple style="width: 200px; height: 125px;">
- <option value="2bit">TwoBit</option>
+ <option value="2bit" selected>TwoBit</option><option value="bowtie">Bowtie</option><option value="bowtie2">Bowtie 2</option><option value="bwa">BWA</option>
@@ -75,7 +75,7 @@
</div></div><h2>Parameters</h2>
- <div id="params_generic" class="params-block" style="display: block;">
+ <div id="params_Broad" class="params-block" style="display: block;"><div class="form-row"><label for="longname">Internal Name</label><input name="longname" type="text" label="Internal Name" />
@@ -88,55 +88,47 @@
</div><div id="dlparams"><div class="form-row">
- <label for="dbkey">External Name</label>
- <input name="dbkey" type="text" label="Genome Unique Name" />
+ <label for="broad_dbkey">External Name</label>
+ <input name="broad_dbkey" type="text" label="Genome Unique Name" /><div style="clear: both;"> </div></div></div></div>
- <div id="params_ensembl" class="params-block">
+ <div id="params_NCBI" class="params-block" style="display: block;"><div class="form-row">
- <label for="ensembl_section">Section</label>
- <input name="ensembl_section" type="text" label="Section" />
+ <label for="longname">Internal Name</label>
+ <input name="longname" type="text" label="Internal Name" /><div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Ensembl section, either standard or one of plants, protists, metazoa, fungi, bacteria.
- </div></div><div class="form-row">
- <label for="release_number">Release Number</label>
- <input name="release_number" type="text" label="Release" />
+ <label for="uniqid">Internal Unique Identifier</label>
+ <input name="uniqid" type="text" label="Internal Identifier" /><div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Release number, e.g. ftp://ftp.ensembl.org/pub/release-<strong style="color: red;">56</strong>/fasta/callithrix_jacchus/dna/Callithrix_jacchus.calJac3.56.dna.toplevel.fa.gz
- </div></div>
- <div class="form-row">
- <label for="organism">Organism</label>
- <input name="organism" type="text" label="Organism" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Organism long name, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/<strong style="color: red;">Callithrix_jacchus</strong>.calJac3.56.dna.toplevel.fa.gz
- </div>
- </div>
- <div class="form-row">
- <label for="name">Name</label>
- <input name="name" type="text" label="name" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Organism short name, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/Callithri….<strong style="color: red;">calJac3</strong>.56.dna.toplevel.fa.gz
- </div>
- </div>
- <div class="form-row">
- <label for="release2">Release ID</label>
- <input name="release2" type="text" label="Release ID" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- Release ID, e.g. ftp://ftp.ensembl.org/pub/release-56/fasta/callithrix_jacchus/dna/Callithri….<strong style="color: red;">56</strong>.dna.toplevel.fa.gz
+ <div id="dlparams">
+ <div class="form-row">
+ <label for="ncbi_dbkey">External Name</label>
+ <input name="ncbi_dbkey" type="text" label="Genome Unique Name" />
+ <div style="clear: both;"> </div></div></div></div>
- <div id="params_ucsc" class="params-block">
+ <div id="params_Ensembl" class="params-block">
+ <div class="form-row">
+ <label>Genome:</label>
+ <div class="form-row-input">
+ <select name="ensembl_dbkey" last_selected_value="?">
+ %for dbkey in ensembls:
+ <option value="${dbkey['dbkey']}">${dbkey['dbkey']} - ${dbkey['name']}</option>
+ %endfor
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ If you can't find the build you want in this list, <insert link to instructions here>
+ </div>
+ </div>
+ </div>
+ <div id="params_UCSC" class="params-block"><div class="form-row"><label>Genome:</label><div class="form-row-input">
@@ -166,23 +158,11 @@
checkDataSource();
});
function checkDataSource() {
- var ds = $('#datasource').val()
+ var ds = $('#datasource').val();
$('.params-block').each(function() {
$(this).hide();
});
- switch (ds) {
- case 'UCSC':
- $('#params_ucsc').show();
- break;
- case 'Ensembl':
- $('#params_ensembl').show();
- break;
- case 'NCBI':
- case 'Broad':
- default:
- $('#params_generic').show();
- break;
- }
+ $('#params_' + ds).show();
};
</script></form>
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/download_status.mako
--- a/templates/admin/data_admin/download_status.mako
+++ b/templates/admin/data_admin/download_status.mako
@@ -33,10 +33,15 @@
</div></div></%def>
-<p>The genome build and any selected indexers have been added to the job queue. Below you will see the status of each job.</p>
+<p>${name} been added to the job queue
+ %if indexers:
+ to be indexed with ${indexers}
+ %endif
+ </p><table id="jobStatus"></table>
-<a href="${h.url_for( controller='data_admin', action='manage_data' )}">Return to the download form</a>
+<p><a href="${h.url_for( controller='data_admin', action='manage_data' )}">Overview</a>.</p>
+<p><a href="${h.url_for( controller='data_admin', action='add_genome' )}">Download form</a>.</p><script type="text/javascript">
jobs = ${jsonjobs}
finalstates = new Array('done', 'error', 'ok');
diff -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 -r d2aba0918cf01b1c5be95f7b0b59cc52cd889dbd templates/admin/data_admin/local_data.mako
--- /dev/null
+++ b/templates/admin/data_admin/local_data.mako
@@ -0,0 +1,161 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/galaxy/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
+ self.has_accessible_datasets = False
+%>
+</%def>
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "autocomplete_tagging" )}
+</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
+</%def>
+##
+## Override methods from base.mako and base_panels.mako
+##
+<%def name="center_panel()">
+ <div style="overflow: auto; height: 100%;">
+ <div class="page-container" style="padding: 10px;">
+ ${render_content()}
+ </div>
+ </div>
+</%def>
+<style type="text/css">
+ .params-block { display: none; }
+ td, th { padding-left: 10px; padding-right: 10px; }
+ td.Generate { text-decoration: underline; background-color: #EEEEEE; }
+ td.Generating { text-decoration: none; background-color: #FFFFCC; }
+ td.Generated { background-color: #CCFFCC; }
+</style>
+<div class="toolForm">
+ %if message:
+ <div class="${status}">${message}</div>
+ %endif
+ <div class="toolFormTitle">Currently tracked builds <a class="action-button" href="/data_admin/add_genome">Add new</a></div>
+ <div class="toolFormBody">
+ <h2>Locally cached data:</h2>
+ <h3>NOTE: Indexers queued here will not be reflected in the table until Galaxy is restarted.</h3>
+ <table id="locfiles">
+ <tr><th>Database ID</th><th>Name</th><th>Bowtie</th><th>Bowtie 2</th><th>BWA</th><th>Sam</th><th>Picard</th><th>PerM</th></tr>
+ %for dbkey in sorted(genomes.keys()):
+ <tr>
+ <td>${dbkey}</td>
+ <td>${genomes[dbkey]['name']}</td>
+ <td id="${dbkey}-bowtie" class="indexcell ${genomes[dbkey]['bowtie_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie_indexes']}</td>
+ <td id="${dbkey}-bowtie2" class="indexcell ${genomes[dbkey]['bowtie2_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie2" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie2_indexes']}</td>
+ <td id="${dbkey}-bwa" class="indexcell ${genomes[dbkey]['bwa_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bwa" data-dbkey="${dbkey}">${genomes[dbkey]['bwa_indexes']}</td>
+ <td id="${dbkey}-sam" class="indexcell ${genomes[dbkey]['sam_fa_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="sam" data-dbkey="${dbkey}">${genomes[dbkey]['sam_fa_indexes']}</td>
+ <td id="${dbkey}-picard" class="indexcell ${genomes[dbkey]['srma_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="picard" data-dbkey="${dbkey}">${genomes[dbkey]['srma_indexes']}</td>
+ <td id="${dbkey}-perm" class="indexcell ${genomes[dbkey]['perm_base_indexes']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="perm" data-dbkey="${dbkey}">${genomes[dbkey]['perm_base_indexes']}</td>
+ </tr>
+ %endfor
+ </table>
+ <h2>Recent jobs:</h2>
+ <p>Click the job ID to see job details. Note that this list only shows jobs initiated by your account.</p>
+ <div id="recentJobs">
+ %for job in jobgrid:
+ <div id="job-${job['deferred']}" data-dbkey="${job['dbkey']}" data-name="${job['intname']}" data-indexes="${job['indexers']}" data-jobid="${job['deferred']}" data-state="${job['state']}" class="historyItem-${job['state']} historyItemWrapper historyItem">
+ <p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status', job=job['deferred'] )}">${job['deferred']}</a>:
+ %if job['jobtype'] == 'download':
+ Download <em>${job['intname']}</em>
+ %if job['indexers']:
+ and index with ${job['indexers']}
+ %endif
+ %else:
+ Index <em>${job['intname']}</em> with ${job['indexers']}
+ %endif
+ </p>
+ </div>
+ %endfor
+ </div>
+</div>
+<script type="text/javascript">
+ finalstates = new Array('done', 'error', 'ok');
+ $('.indexcell').click(function() {
+ status = $(this).html();
+ elem = $(this);
+ if (status != 'Generate') {
+ return;
+ }
+ longname = $(this).attr('data-longname');
+ dbkey = $(this).attr('data-dbkey');
+ indexes = $(this).attr('data-index');
+ path = $(this).attr('data-fapath');
+ $.post('${h.url_for( controller='data_admin', action='index_build' )}', { longname: longname, dbkey: dbkey, indexes: indexes, path: path }, function(data) {
+ if (data == 'ERROR') {
+ alert('There was an error.');
+ }
+ else {
+ elem.html('Generating');
+ elem.attr('class', 'indexcell Generating');
+ }
+ newhtml = '<div data-dbkey="' + dbkey + '" data-name="' + longname + '" data-indexes="' + indexes + '" id="job-' + data + '" class="historyItem-new historyItemWrapper historyItem">' +
+ '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + data + '">' + data + '</a>: ' +
+ 'Index <em>' + longname + '</em> with ' + indexes + '</p></div>';
+ $('#recentJobs').prepend(newhtml);
+ $('#job-' + data).delay(3000).queue(function(n) {
+ checkJob(data);
+ n();
+ });
+ });
+ });
+
+ function checkJob(jobid) {
+ $.get('${h.url_for( controller='data_admin', action='get_jobs' )}', { jobid: jobid }, function(data) {
+ jsondata = JSON.parse(data)[0];
+ jsondata["name"] = $('#job-' + jobid).attr('data-name');
+ jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey');
+ jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes');
+ newhtml = makeNewHTML(jsondata);
+ $('#job-' + jobid).replaceWith(newhtml);
+ if ($.inArray(jsondata["status"], finalstates) == -1) {
+ $('#job-' + jobid).delay(3000).queue(function(n) {
+ checkJob(jobid);
+ n();
+ });
+ }
+ if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') {
+ elem = $('#' + jsondata["dbkey"] + '-' + jsondata["indexes"]);
+ elem.html('Generated');
+ elem.attr('class', 'indexcell Generated');
+ }
+ });
+ }
+
+ function makeNewHTML(jsondata) {
+ newhtml = '<div data-dbkey="' + jsondata["dbkey"] + '" data-name="' + jsondata["name"] + '" data-indexes="' + jsondata["indexes"] + '" id="job-' + jsondata["jobid"] + '" class="historyItem-' + jsondata["status"] + ' historyItemWrapper historyItem">' +
+ '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + jsondata["jobid"] + '">' + jsondata["jobid"] + '</a>: ' +
+ 'Index <em>' + jsondata["name"] + '</em> with ' + jsondata["indexes"] + '</p></div>';
+ return newhtml;
+ }
+
+ $(document).ready(function() {
+ $('.historyItem').each(function() {
+ state = $(this).attr('data-state');
+ jobid = $(this).attr('data-jobid');
+ if ($.inArray(state, finalstates) == -1) {
+ checkJob(jobid);
+ }
+ });
+ });
+
+</script>
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Revert one of the "fixes" in 7301:46de10d8c8e5.
by Bitbucket 22 Jun '12
by Bitbucket 22 Jun '12
22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c4f325ba7caa/
changeset: c4f325ba7caa
user: greg
date: 2012-06-22 20:23:28
summary: Revert one of the "fixes" in 7301:46de10d8c8e5.
affected #: 1 file
diff -r 46de10d8c8e5ffd537f7c13e4833aa8635ab6fcf -r c4f325ba7caa1c86b0f97a0a486c3bbdea15c754 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -438,13 +438,7 @@
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files = load_tool_from_tmp_directory( trans,
- repo,
- repo_dir,
- ctx,
- filename,
- work_dir )
- all_sample_files_copied.extend( sample_files )
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
@@ -861,8 +855,6 @@
tool = None
valid = False
error_message = ''
- sample_files = []
- deleted_sample_files = []
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
if tmp_config:
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
@@ -876,9 +868,6 @@
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
is_tool_config = False
if is_tool_config:
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=dir )
- if sample_files:
- trans.app.config.tool_data_path = dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
if tool_data_table_config:
@@ -903,7 +892,7 @@
error_message = str( e )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
- return is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files
+ return is_tool_config, valid, tool, error_message
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: More tool shed refinements to enable working with Galaxy's ToolDataTableManager.
by Bitbucket 22 Jun '12
by Bitbucket 22 Jun '12
22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/46de10d8c8e5/
changeset: 46de10d8c8e5
user: greg
date: 2012-06-22 20:10:53
summary: More tool shed refinements to enable working with Galaxy's ToolDataTableManager.
affected #: 1 file
diff -r f197c4346cc44bf3996eee6b24e2c393ad519d9d -r 46de10d8c8e5ffd537f7c13e4833aa8635ab6fcf lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -429,6 +429,7 @@
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
trans.app.config.tool_data_path = work_dir
+ all_sample_files_copied = []
# Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
if 'tool_data_table_conf.xml.sample' in sample_files:
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
@@ -437,7 +438,13 @@
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files = load_tool_from_tmp_directory( trans,
+ repo,
+ repo_dir,
+ ctx,
+ filename,
+ work_dir )
+ all_sample_files_copied.extend( sample_files )
if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
@@ -449,33 +456,13 @@
invalid_files,
original_tool_data_path,
work_dir )
+ all_sample_files_copied.extend( sample_files_copied )
if can_set_metadata:
# Update the list of metadata dictionaries for tools in metadata_dict.
repository_clone_url = generate_clone_url( trans, id )
metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
else:
invalid_tool_configs.append( ctx_file_name )
- # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary
- # value of trans.app.config.tool_data_path, which is work_dir.
- for copied_sample_file in sample_files_copied:
- copied_file = copied_sample_file.replace( '.sample', '' )
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) )
- except:
- pass
- if trans.app.config.tool_data_path == work_dir:
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) )
- except:
- pass
- try:
- os.unlink( os.path.join( original_tool_data_path, copied_file ) )
- except:
- pass
elif is_tool_config:
if not error_message:
error_message = 'Unknown problems loading tool.'
@@ -508,6 +495,27 @@
shutil.rmtree( work_dir )
except:
pass
+ # Remove all copied sample files from both the original tool data path (~/shed-tool-data) and the temporary
+ # value of trans.app.config.tool_data_path, which is work_dir.
+ for copied_sample_file in all_sample_files_copied:
+ copied_file = copied_sample_file.replace( '.sample', '' )
+ try:
+ os.unlink( os.path.join( trans.app.config.tool_data_path, copied_sample_file ) )
+ except:
+ pass
+ try:
+ os.unlink( os.path.join( trans.app.config.tool_data_path, copied_file ) )
+ except:
+ pass
+ if trans.app.config.tool_data_path == work_dir:
+ try:
+ os.unlink( os.path.join( original_tool_data_path, copied_sample_file ) )
+ except:
+ pass
+ try:
+ os.unlink( os.path.join( original_tool_data_path, copied_file ) )
+ except:
+ pass
return metadata_dict, invalid_files, deleted_sample_files
def generate_tool_guid( trans, repository, tool ):
"""
@@ -810,6 +818,7 @@
except:
pass
return tool, message
+ original_tool_data_path = trans.app.config.tool_data_path
tool_config_filename = strip_path( tool_config_filename )
repository = get_repository( trans, repository_id )
repo_files_dir = repository.repo_path
@@ -818,6 +827,9 @@
tool = None
message = ''
work_dir = make_tmp_directory()
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ if sample_files:
+ trans.app.config.tool_data_path = work_dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
if tool_data_table_config:
@@ -841,12 +853,16 @@
shutil.rmtree( work_dir )
except:
pass
+ if sample_files:
+ trans.app.config.tool_data_path = original_tool_data_path
return tool, message
def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ):
is_tool_config = False
tool = None
valid = False
error_message = ''
+ sample_files = []
+ deleted_sample_files = []
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
if tmp_config:
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
@@ -860,6 +876,9 @@
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
is_tool_config = False
if is_tool_config:
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=dir )
+ if sample_files:
+ trans.app.config.tool_data_path = dir
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
if tool_data_table_config:
@@ -884,7 +903,7 @@
error_message = str( e )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
- return is_tool_config, valid, tool, error_message
+ return is_tool_config, valid, tool, error_message, sample_files, deleted_sample_files
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Allow external display applications to work when login_required=True.
by Bitbucket 22 Jun '12
by Bitbucket 22 Jun '12
22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f197c4346cc4/
changeset: f197c4346cc4
user: dan
date: 2012-06-22 19:31:29
summary: Allow external display applications to work when login_required=True.
affected #: 1 file
diff -r db2bf800496478a5ea041480c3c514c2620e28ae -r f197c4346cc44bf3996eee6b24e2c393ad519d9d lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -419,7 +419,7 @@
# The value of session_cookie can be one of
# 'galaxysession' or 'galaxycommunitysession'
# Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession':
+ if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
# TODO: re-engineer to eliminate the use of allowed_paths
# as maintenance overhead is far too high.
allowed_paths = (
@@ -443,16 +443,23 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.galaxy_session.user is None:
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
+ if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ try:
+ host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
+ except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+ host = None
+ if host in UCSC_SERVERS:
+ return
+ external_display_path = url_for( controller='dataset', action='display_application' )
+ if self.request.path.startswith( external_display_path ):
+ request_path_split = external_display_path.split( '/' )
+ try:
+ if self.app.datatypes_registry.display_applications.get( request_path_split[-5] ) and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links and request_path_split[-3] != 'None':
return
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
+ except IndexError:
+ pass
+ if self.request.path not in allowed_paths:
+ self.response.send_redirect( url_for( controller='root', action='index' ) )
def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
"""
Create a new GalaxySession for this request, possibly with a connection
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you, the addressed recipient, have the commit
notification service enabled.
1
0
commit/galaxy-central: greg: Refinements for installing and managing tool dependencies for tools contained in installed tool shed repositories.
by Bitbucket 22 Jun '12
by Bitbucket 22 Jun '12
22 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/db2bf8004964/
changeset: db2bf8004964
user: greg
date: 2012-06-22 16:21:09
summary: Refinements for installing and managing tool dependencies for tools contained in installed tool shed repositories.
affected #: 26 files
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2647,6 +2647,12 @@
pass
class ToolShedRepository( object ):
+ installation_status = Bunch( CLONED='cloned',
+ SETTING_TOOL_VERSIONS='setting tool versions',
+ INSTALLING_TOOL_DEPENDENCIES='installing tool dependencies',
+ INSTALLED='installed',
+ ERROR='error',
+ UNINSTALLED='uninstalled' )
def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False,
uninstalled=False, dist_to_shed=False ):
@@ -2696,47 +2702,59 @@
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
- if not tool_dependency.uninstalled:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLED:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@property
def missing_tool_dependencies( self ):
"""Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
missing_dependencies = []
- # Get the dependency information from the metadata for comparison against the installed tool dependencies.
- tool_dependencies = self.metadata.get( 'tool_dependencies', None )
- if tool_dependencies:
- for dependency_key, requirements_dict in tool_dependencies.items():
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- if self.tool_dependencies:
- found = False
- for tool_dependency in self.tool_dependencies:
- if tool_dependency.name==name and tool_dependency.version==version and tool_dependency.type==type:
- found = True
- if tool_dependency.uninstalled:
- missing_dependencies.append( ( tool_dependency.name, tool_dependency.version, tool_dependency.type ) )
- break
- if not found:
- missing_dependencies.append( ( name, version, type ) )
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status in [ ToolDependency.installation_status.NEVER_INSTALLED,
+ ToolDependency.installation_status.ERROR,
+ ToolDependency.installation_status.UNINSTALLED ]:
+ missing_dependencies.append( tool_dependency )
return missing_dependencies
@property
+ def tool_dependencies_being_installed( self ):
+ dependencies_being_installed = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+ dependencies_being_installed.append( tool_dependency )
+ return dependencies_being_installed
+ @property
+ def tool_dependencies_with_installation_errors( self ):
+ dependencies_with_installation_errors = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status == ToolDependency.installation_status.ERROR:
+ dependencies_with_installation_errors.append( tool_dependency )
+ return dependencies_with_installation_errors
+ @property
def uninstalled_tool_dependencies( self ):
"""Return the repository's tool dependencies that have been uninstalled."""
uninstalled_tool_dependencies = []
for tool_dependency in self.tool_dependencies:
- if tool_dependency.uninstalled:
+ if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
uninstalled_tool_dependencies.append( tool_dependency )
return uninstalled_tool_dependencies
class ToolDependency( object ):
- def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, uninstalled=False ):
+ installation_status = Bunch( NEVER_INSTALLED='Never installed',
+ INSTALLING='Installing',
+ INSTALLED='Installed',
+ ERROR='Error',
+ UNINSTALLED='Uninstalled' )
+ states = Bunch( INSTALLING = 'running',
+ OK = 'ok',
+ ERROR = 'error',
+ UNINSTALLED = 'deleted_new' )
+ def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None ):
self.tool_shed_repository_id = tool_shed_repository_id
self.name = name
self.version = version
self.type = type
- self.uninstalled = uninstalled
+ self.status = status
+ self.error_message = error_message
def installation_directory( self, app ):
return os.path.join( app.config.tool_dependency_dir,
self.name,
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -398,7 +398,8 @@
Column( "name", TrimmedString( 255 ) ),
Column( "version", Text ),
Column( "type", TrimmedString( 40 ) ),
- Column( "uninstalled", Boolean, default=False ) )
+ Column( "status", TrimmedString( 255 ), nullable=False ),
+ Column( "error_message", TEXT ) )
ToolVersion.table = Table( "tool_version", metadata,
Column( "id", Integer, primary_key=True ),
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
@@ -0,0 +1,63 @@
+"""
+Migration script to add status and error_message columns to the tool_dependency table and drop the uninstalled column from the tool_dependency table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ col = Column( "status", TrimmedString( 255 ), nullable=False )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.status
+ except Exception, e:
+ print "Adding status column to the tool_dependency table failed: %s" % str( e )
+ col = Column( "error_message", TEXT )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.error_message
+ except Exception, e:
+ print "Adding error_message column to the tool_dependency table failed: %s" % str( e )
+ try:
+ ToolDependency_table.c.uninstalled.drop()
+ except Exception, e:
+ print "Dropping uninstalled column from the tool_dependency table failed: %s" % str( e )
+def downgrade():
+ metadata.reflect()
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ try:
+ ToolDependency_table.c.status.drop()
+ except Exception, e:
+ print "Dropping column status from the tool_dependency table failed: %s" % str( e )
+ try:
+ ToolDependency_table.c.error_message.drop()
+ except Exception, e:
+ print "Dropping column error_message from the tool_dependency table failed: %s" % str( e )
+ col = Column( "uninstalled", Boolean, default=False )
+ try:
+ col.create( ToolDependency_table )
+ assert col is ToolDependency_table.c.uninstalled
+ except Exception, e:
+ print "Adding uninstalled column to the tool_dependency table failed: %s" % str( e )
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,7 +7,6 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
-from galaxy.tool_shed.migrate.common import *
REPOSITORY_OWNER = 'devteam'
@@ -177,7 +176,7 @@
status, message = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config )
- if status != 'ok' and message:
+ if status != 'done' and message:
print 'The following error occurred from the InstallManager while installing tool dependencies:'
print message
add_to_tool_panel( self.app,
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,10 +1,6 @@
import os, shutil, tarfile, urllib2
from galaxy.datatypes.checkers import *
-MISCELLANEOUS_ACTIONS = [ 'change_directory' ]
-MOVE_ACTIONS = [ 'move_directory_files', 'move_file' ]
-ALL_ACTIONS = MISCELLANEOUS_ACTIONS + MOVE_ACTIONS
-
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
# Open for reading with transparent compression.
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -13,6 +13,8 @@
from fabric.api import env, lcd, local, settings
+INSTALLATION_LOG = 'INSTALLATION.log'
+
def check_fabric_version():
version = env.version
if int( version.split( "." )[ 0 ] ) < 1:
@@ -32,16 +34,19 @@
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
-def handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=None ):
+def handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=None ):
+ sa_session = app.model.context.current
cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
- message = ''
output = local( cmd, capture=True )
- log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) )
+ log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- message = '%s %s' % ( message, str( output.stderr ) )
- return message
-def install_and_build_package( params_dict ):
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+def install_and_build_package( app, tool_dependency, params_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
+ sa_session = app.model.context.current
install_dir = params_dict[ 'install_dir' ]
download_url = params_dict.get( 'download_url', None )
clone_cmd = params_dict.get( 'clone_cmd', None )
@@ -59,43 +64,38 @@
dir = work_dir
elif clone_cmd:
output = local( clone_cmd, capture=True )
- log_results( clone_cmd, output, os.path.join( install_dir, 'clone_repository.log' ) )
+ log_results( clone_cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- return '%s. ' % str( output.stderr )
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
dir = package_name
if actions:
with lcd( dir ):
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
for action_tup in actions:
action_key, action_dict = action_tup
- if action_key.find( 'v^v^v' ) >= 0:
- action_items = action_key.split( 'v^v^v' )
- action_name = action_items[ 0 ]
- action = action_items[ 1 ]
- elif action_key in common_util.ALL_ACTIONS:
- action_name = action_key
- else:
- action_name = None
- if action_name:
- if action_name == 'change_directory':
- current_dir = os.path.join( current_dir, action )
- lcd( current_dir )
- elif action_name == 'move_directory_files':
- common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( action_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_name == 'move_file':
- common_util.move_file( current_dir=current_dir,
- source=os.path.join( action_dict[ 'source' ] ),
- destination_dir=os.path.join( action_dict[ 'destination' ] ) )
+ if action_key == 'move_directory_files':
+ common_util.move_directory_files( current_dir=current_dir,
+ source_dir=os.path.join( action_dict[ 'source_directory' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
+ elif action_key == 'move_file':
+ common_util.move_file( current_dir=current_dir,
+ source=os.path.join( action_dict[ 'source' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination' ] ) )
else:
action = action_key
with settings( warn_only=True ):
output = local( action, capture=True )
- log_results( action, output, os.path.join( install_dir, 'actions.log' ) )
+ log_results( action, output, os.path.join( install_dir, INSTALLATION_LOG ) )
if output.return_code:
- return '%s. ' % str( output.stderr )
- return ''
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( output.stderr )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return
def log_results( command, fabric_AttributeString, file_path ):
"""
Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method)
@@ -105,12 +105,12 @@
logfile = open( file_path, 'ab' )
else:
logfile = open( file_path, 'wb' )
- logfile.write( "\n#############################################" )
- logfile.write( '\n%s\nSTDOUT\n' % command )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDOUT\n' % command )
logfile.write( str( fabric_AttributeString.stdout ) )
- logfile.write( "#############################################\n" )
- logfile.write( "\n#############################################" )
- logfile.write( '\n%s\nSTDERR\n' % command )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDERR\n' % command )
logfile.write( str( fabric_AttributeString.stderr ) )
- logfile.write( "#############################################\n" )
+ logfile.write( "\n#############################################\n" )
logfile.close()
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -11,19 +11,16 @@
from elementtree import ElementTree, ElementInclude
from elementtree.ElementTree import Element, SubElement
-def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type ):
+def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status ):
# Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
+ sa_session = app.model.context.current
# First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
- sa_session = app.model.context.current
tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
if tool_dependency:
- tool_dependency.uninstalled = False
+ tool_dependency.status = status
else:
# Create a new tool_dependency record for the tool_shed_repository.
- tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
- name=name,
- version=version,
- type=type )
+ tool_dependency = app.model.ToolDependency( tool_shed_repository.id, name, version, type, status )
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
@@ -42,14 +39,15 @@
repository.owner,
repository.name,
repository.installed_changeset_revision ) )
-def install_package( app, elem, tool_shed_repository, name=None, version=None ):
- # If name and version are not None, then a specific tool dependency is being installed.
- message = ''
+def install_package( app, elem, tool_shed_repository, tool_dependencies=None ):
+ # The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository.
+ sa_session = app.model.context.current
+ tool_dependency = None
# The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- if ( not name and not version ) or ( name and version and name==package_name and version==package_version ):
+ if tool_dependencies:
install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
if not os.path.exists( install_dir ):
for package_elem in elem:
@@ -66,26 +64,22 @@
# Handle tool dependency installation using a fabric method included in the Galaxy framework.
fabfile_path = None
for method_elem in package_elem:
- error_message = run_fabric_method( app,
- method_elem,
- fabfile_path,
- app.config.tool_dependency_dir,
- install_dir,
- package_name=package_name )
- if error_message:
- message += '%s' % error_message
- else:
- tool_dependency = create_or_update_tool_dependency( app,
- tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package' )
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.INSTALLING )
+ run_fabric_method( app, tool_dependency, method_elem, fabfile_path, install_dir, package_name=package_name )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
print package_name, 'version', package_version, 'installed in', install_dir
else:
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
- return message
-def run_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
+ return tool_dependency
+def run_fabric_method( app, tool_dependency, elem, fabfile_path, install_dir, package_name=None, **kwd ):
"""Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
+ sa_session = app.model.context.current
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
# Default value for env_dependency_path.
@@ -109,7 +103,7 @@
action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
if not action_key:
continue
- elif action_type in MOVE_ACTIONS:
+ elif action_type in [ 'move_directory_files', 'move_file' ]:
# Examples:
# <action type="move_file">
# <source>misc/some_file</source>
@@ -124,9 +118,6 @@
move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
if move_elem_text:
action_dict[ move_elem.tag ] = move_elem_text
- elif action_elem.text:
- # Example: <action type="change_directory">bin</action>
- action_key = '%sv^v^v%s' % ( action_type, action_elem.text )
else:
continue
actions.append( ( action_key, action_dict ) )
@@ -141,24 +132,36 @@
params_dict[ 'package_name' ] = package_name
if fabfile_path:
# TODO: Handle this using the fabric api.
- # run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=package_name )
+ # run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=package_name )
return 'Tool dependency installation using proprietary fabric scripts is not yet supported. '
else:
# There is currently only 1 fabric method, install_and_build_package().
try:
- message = install_and_build_package( params_dict )
- if message:
- return message
+ install_and_build_package( app, tool_dependency, params_dict )
except Exception, e:
- return '%s. ' % str( e )
- try:
- message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
- if message:
- return message
- except:
- return '%s. ' % str( e )
- return ''
-def run_proprietary_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( e )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ try:
+ handle_post_build_processing( app,
+ tool_dependency,
+ install_dir,
+ env_dependency_path,
+ package_name=package_name )
+ except Exception, e:
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = str( e )
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+def run_proprietary_fabric_method( app, elem, fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
@@ -193,7 +196,7 @@
return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
if returncode:
return message
- message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
+ message = handle_post_build_processing( app, tool_dependency, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
else:
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -6,7 +6,7 @@
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
-from galaxy.tool_shed.tool_dependencies.install_util import install_package
+from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package
from galaxy.tool_shed.encoding_util import *
from galaxy.model.orm import *
@@ -318,8 +318,8 @@
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
if current_changeset_revision is None:
# The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository
- # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior to
- # it being uninstalled.
+ # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior
+ # to it being uninstalled.
current_changeset_revision = installed_changeset_revision
sa_session = app.model.context.current
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
@@ -353,6 +353,37 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
+def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision ):
+ # Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
+ tool_dependency_objects = []
+ work_dir = make_tmp_directory()
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = get_config_from_repository( app,
+ 'tool_dependencies.xml',
+ tool_shed_repository,
+ current_changeset_revision,
+ work_dir )
+ tree = ElementTree.parse( tool_dependencies_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ fabric_version_checked = False
+ for elem in root:
+ if elem.tag == 'package':
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.NEVER_INSTALLED )
+ tool_dependency_objects.append( tool_dependency )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ return tool_dependency_objects
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
@@ -1163,7 +1194,7 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, name=None, version=None, type='package' ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies=None ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
@@ -1171,7 +1202,7 @@
will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
- status = 'ok'
+ status = 'done'
message = ''
# Parse the tool_dependencies.xml config.
tree = ElementTree.parse( tool_dependencies_config )
@@ -1179,12 +1210,24 @@
ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
- if elem.tag == type:
- error_message = install_package( app, elem, tool_shed_repository, name=name, version=version )
- if error_message:
- message += ' %s' % error_message
- if message:
- status = 'error'
+ if elem.tag == 'package':
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ can_install = True
+ if tool_dependencies:
+ # Only install the package if it is not already installed.
+ can_install = False
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.name==package_name and tool_dependency.version==package_version:
+ can_install = tool_dependency.status in [ app.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ app.model.ToolDependency.installation_status.UNINSTALLED ]
+ break
+ if can_install:
+ tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
+ if tool_dependency and tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
+ message = tool_dependency.error_message
+ status = 'error'
return status, message
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
@@ -1247,13 +1290,11 @@
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
def load_repository_contents( trans, repository_name, description, owner, installed_changeset_revision, current_changeset_revision, ctx_rev,
- tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None,
- install_tool_dependencies=False ):
+ tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None ):
"""
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- message = ''
metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
# Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked deleted, undelete it. This
# must happen before the call to add_to_tool_panel() below because tools will not be properly loaded if the repository is marked deleted.
@@ -1285,20 +1326,6 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied )
- if install_tool_dependencies and 'tool_dependencies' in metadata_dict:
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- current_changeset_revision,
- work_dir )
- # Install dependencies for repository tools.
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config )
- if status != 'ok' and message:
- print 'The following error occurred from load_repository_contents while installing tool dependencies:'
- print message
add_to_tool_panel( app=trans.app,
repository_name=repository_name,
repository_clone_url=repository_clone_url,
@@ -1340,7 +1367,9 @@
shutil.rmtree( work_dir )
except:
pass
- return tool_shed_repository, metadata_dict, message
+ if 'tool_dependencies' in metadata_dict:
+ tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, current_changeset_revision )
+ return tool_shed_repository, metadata_dict
def make_tmp_directory():
tmp_dir = os.getenv( 'TMPDIR', '' )
if tmp_dir:
@@ -1505,7 +1534,7 @@
error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
log.debug( error_message )
if removed:
- tool_dependency.uninstalled = True
+ tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -77,9 +77,71 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class )
+class ToolDependencyGrid( grids.Grid ):
+ class NameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.name
+ class VersionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.version
+ class TypeColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ return tool_dependency.type
+ class StatusColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_dependency ):
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLING ]:
+ return tool_dependency.status
+ else:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ bgcolor = trans.model.ToolDependency.states.UNINSTALLED
+ elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.ERROR ]:
+ bgcolor = trans.model.ToolDependency.states.ERROR
+ elif tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED ]:
+ bgcolor = trans.model.ToolDependency.states.OK
+ rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">' % ( bgcolor, trans.security.encode_id( tool_dependency.id ) )
+ rval += '%s</div>' % tool_dependency.status
+ return rval
+
+ webapp = "galaxy"
+ title = "Tool Dependencies"
+ template = "admin/tool_shed_repository/tool_dependencies_grid.mako"
+ model_class = model.ToolDependency
+ default_sort_key = "-create_time"
+ num_rows_per_page = 50
+ preserve_state = True
+ use_paging = False
+ columns = [
+ NameColumn( "Name",
+ filterable="advanced" ),
+ VersionColumn( "Version",
+ filterable="advanced" ),
+ TypeColumn( "Type",
+ filterable="advanced" ),
+ StatusColumn( "Installation Status",
+ filterable="advanced" ),
+ ]
+ operations = [
+ grids.GridOperation( "Install",
+ allow_multiple=True,
+ condition=( lambda item: item.status in [ model.ToolDependency.installation_status.NEVER_INSTALLED,
+ model.ToolDependency.installation_status.UNINSTALLED ] ) ),
+ grids.GridOperation( "Uninstall",
+ allow_multiple=True,
+ allow_popup=False,
+ condition=( lambda item: item.status in [ model.ToolDependency.installation_status.INSTALLED,
+ model.ToolDependency.installation_status.ERROR ] ) )
+ ]
+ def apply_query_filter( self, trans, query, **kwd ):
+ tool_dependency_id = kwd.get( 'tool_dependency_id', None )
+ if not tool_dependency_id:
+ return query
+ return query.filter_by( tool_dependency_id=trans.security.decode_id( tool_dependency_id ) )
+
class AdminToolshed( AdminGalaxy ):
repository_list_grid = RepositoryListGrid()
+ tool_dependency_grid = ToolDependencyGrid()
@web.expose
@web.require_admin
@@ -153,9 +215,8 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
- repository = get_repository( trans, kwd[ 'repository_id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_tool_dependency.mako',
- repository=repository,
+ repository=tool_dependency.tool_shed_repository,
tool_dependency=tool_dependency,
message=message,
status=status )
@@ -187,6 +248,35 @@
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
+ def confirm_tool_dependency_install( self, trans, **kwd ):
+ """Display a page enabling the Galaxy administrator to choose to install tool dependencies for a tool shed repository they are installing."""
+ # This method is called from the tool shed (never Galaxy) when a tool shed repository that includes a file named tool_dependencies.xml
+ # is being installed into a local Galaxy instance.
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ tool_shed_url = kwd[ 'tool_shed_url' ]
+ repo_info_dict = kwd[ 'repo_info_dict' ]
+ includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+ # Decode the encoded repo_info_dict param value.
+ dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
+ # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
+ new_repo_info_dict = {}
+ for name, repo_info_tuple in dict_with_tool_dependencies.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
+ new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
+ repo_info_dict = tool_shed_encode( new_repo_info_dict )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ return trans.fill_template( '/admin/tool_shed_repository/confirm_tool_dependency_install.mako',
+ tool_shed_url=tool_shed_url,
+ repo_info_dict=repo_info_dict,
+ dict_with_tool_dependencies=dict_with_tool_dependencies,
+ includes_tools=includes_tools,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -270,71 +360,33 @@
return get_repository_file_contents( file_path )
@web.expose
@web.require_admin
- def install_missing_tool_dependencies( self, trans, **kwd ):
- """
- Install dependencies for tools included in the repository that were not installed when the repository was installed or that are
- being reinstalled after the repository was uninstalled.
- """
- reinstalling = util.string_as_bool( kwd.get( 'reinstalling', False ) )
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
- install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
- if not reinstalling and install_tool_dependencies and kwd.get( 'install_missing_tool_dependencies_button', False ):
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- # Get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=repository,
- tool_dependencies_config=tool_dependencies_config )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
- repository=repository,
- description=repository.description,
- repo_files_dir=repo_files_dir,
- message=message,
- status=status )
- if reinstalling and kwd.get( 'install_missing_tool_dependencies_button', False ):
- # The user has been presented the option to install tool dependencies, so redirect to reinstall the repository, sending
- # along the user's choice.
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reinstall_repository',
- **kwd ) )
- tool_dependencies = repository.metadata[ 'tool_dependencies' ]
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
- if not reinstalling:
- # Filter the tool_dependencies dictionary to eliminate successfully installed dependencies.
- filtered_tool_dependencies = {}
- for missing_dependency_tup in repository.missing_tool_dependencies:
- name, version, type = missing_dependency_tup
- dependency_key = '%s/%s' % ( name, version )
- install_dir = get_tool_dependency_install_dir( trans.app, repository, name, version )
- filtered_tool_dependencies[ dependency_key ] = dict( name=name, type=type, version=version )
- tool_dependencies = filtered_tool_dependencies
- no_changes = kwd.get( 'no_changes', '' )
- no_changes_checked = CheckboxField.is_checked( no_changes )
- new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
- tool_panel_section = kwd.get( 'tool_panel_section', '' )
- return trans.fill_template( '/admin/tool_shed_repository/install_missing_tool_dependencies.mako',
- repository=repository,
- reinstalling=reinstalling,
- tool_dependencies=tool_dependencies,
- no_changes_checked=no_changes_checked,
- new_tool_panel_section=new_tool_panel_section,
- tool_panel_section=tool_panel_section,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
+ def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
+ """Install specified dependencies for repository tools."""
+ # Get the tool_shed_repository from one of the tool_dependencies.
+ tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
+ work_dir = make_tmp_directory()
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = get_config_from_repository( trans.app,
+ 'tool_dependencies.xml',
+ tool_shed_repository,
+ tool_shed_repository.changeset_revision,
+ work_dir )
+ status, message = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
+ if not message:
+ message = "Installed tool dependencies: %s" % ','.join( td.name for td in tool_dependencies )
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ message=message,
+ status=status ) )
@web.expose
@web.require_admin
def install_repository( self, trans, **kwd ):
@@ -413,23 +465,19 @@
clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
- repository_name=name,
- description=description,
- owner=owner,
- installed_changeset_revision=changeset_revision,
- current_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- install_tool_dependencies=install_tool_dependencies )
- if error_message:
- message += error_message
- status = 'error'
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=name,
+ description=description,
+ owner=owner,
+ installed_changeset_revision=changeset_revision,
+ current_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%srepository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \
@@ -449,26 +497,28 @@
if installed_repository_names:
installed_repository_names.sort()
num_repositories_installed = len( installed_repository_names )
- if install_tool_dependencies:
- dependency_str = ' along with tool dependencies '
- else:
- dependency_str = ''
if tool_section:
- message += 'Installed %d %s%sand all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
+ message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
( num_repositories_installed,
inflector.cond_plural( num_repositories_installed, 'repository' ),
- dependency_str,
tool_section.name )
else:
- message += 'Installed %d %s%s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \
+ message += 'Installed %d %s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \
( num_repositories_installed,
- inflector.cond_plural( num_repositories_installed, 'repository' ),
- dependency_str )
+ inflector.cond_plural( num_repositories_installed, 'repository' ) )
for i, repo_name in enumerate( installed_repository_names ):
if i == len( installed_repository_names ) -1:
message += '%s.<br/>' % repo_name
else:
message += '%s, ' % repo_name
+ if install_tool_dependencies:
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.missing_tool_dependencies ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ operation='install',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
@@ -520,61 +570,42 @@
@web.expose
@web.require_admin
def install_tool_dependencies( self, trans, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- tool_shed_url = kwd[ 'tool_shed_url' ]
- repo_info_dict = kwd[ 'repo_info_dict' ]
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
- # Decode the encoded repo_info_dict param value.
- dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
- # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
- new_repo_info_dict = {}
- for name, repo_info_tuple in dict_with_tool_dependencies.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
- new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
- repo_info_dict = tool_shed_encode( new_repo_info_dict )
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ tool_dependencies = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependencies.append( tool_dependency )
+ if kwd.get( 'install_tool_dependencies_button', False ):
+ # Filter tool dependencies to only those that are not currently installed.
+ tool_dependencies_for_installation = []
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.UNINSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_installation.append( tool_dependency )
+ if tool_dependencies_for_installation:
+ # Redirect back to the ToolDependencyGrid before initiating installation.
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies_for_installation ]
+ new_kwd = dict( action='manage_tool_dependencies',
+ operation='initiate_tool_dependency_installation',
+ tool_dependency_ids=tool_dependency_ids,
+ message=message,
+ status=status )
+ return self.tool_dependency_grid( trans, **new_kwd )
+ else:
+ message = 'All of the selected tool dependencies are already installed.'
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_shed_url=tool_shed_url,
- repo_info_dict=repo_info_dict,
- dict_with_tool_dependencies=dict_with_tool_dependencies,
- includes_tools=includes_tools,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
- def install_tool_dependency( self, trans, name, version, type, repository_id, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository = get_repository( trans, repository_id )
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- # Get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- status, message = handle_tool_dependencies( app=trans.app,
- tool_shed_repository=repository,
- tool_dependencies_config=tool_dependencies_config,
- name=name,
- version=version,
- type=type )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
- repository=repository,
- description=repository.description,
- repo_files_dir=repo_files_dir,
+ tool_dependencies=tool_dependencies,
message=message,
status=status )
@web.expose
@@ -614,12 +645,79 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository_id = kwd[ 'id' ]
- repository = get_repository( trans, repository_id )
- return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
- repository=repository,
- message=message,
- status=status )
+ tool_dependency_id = params.get( 'tool_dependency_id', None )
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ if tool_dependency_id and tool_dependency_id not in tool_dependency_ids:
+ tool_dependency_ids.append( tool_dependency_id )
+ tool_dependencies = []
+ # We need a tool_shed_repository, so get it from one of the tool_dependencies.
+ tool_dependency = get_tool_dependency( trans, tool_dependency_ids[ 0 ] )
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name
+ self.tool_dependency_grid.global_actions = \
+ [ grids.GridAction( label='Browse repository',
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Manage repository',
+ url_args=dict( controller='admin_toolshed',
+ action='manage_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Get updates',
+ url_args=dict( controller='admin_toolshed',
+ action='check_for_updates',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Set tool versions',
+ url_args=dict( controller='admin_toolshed',
+ action='set_tool_versions',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ),
+ grids.GridAction( label='Deactivate or uninstall repository',
+ url_args=dict( controller='admin_toolshed',
+ action='deactivate_or_uninstall_repository',
+ id=trans.security.encode_id( tool_shed_repository.id ) ) ) ]
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if not tool_dependency_ids:
+ message = 'Select at least 1 tool dependency to %s.' % operation
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'error'
+ del kwd[ 'operation' ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ **kwd ) )
+ if operation == 'browse':
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_tool_dependency',
+ **kwd ) )
+ elif operation == 'uninstall':
+ tool_dependencies_for_uninstallation = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_uninstallation.append( tool_dependency )
+ if tool_dependencies_for_uninstallation:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='uninstall_tool_dependencies',
+ **kwd ) )
+ else:
+ kwd[ 'message' ] = 'All selected tool dependencies are already uninstalled.'
+ kwd[ 'status' ] = 'error'
+ elif operation == "install":
+ tool_dependencies_for_installation = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ trans.model.ToolDependency.installation_status.UNINSTALLED ]:
+ tool_dependencies_for_installation.append( tool_dependency )
+ if tool_dependencies_for_installation:
+ self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ else:
+ kwd[ 'message' ] = 'All selected tool dependencies are already installed.'
+ kwd[ 'status' ] = 'error'
+ return self.tool_dependency_grid( trans, **kwd )
@web.json
@web.require_admin
def open_folder( self, trans, folder_path ):
@@ -710,39 +808,52 @@
tool_section = trans.app.toolbox.tool_panel[ section_key ]
else:
tool_section = None
- tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- installed_changeset_revision=repository.installed_changeset_revision,
- current_changeset_revision=current_changeset_revision,
- ctx_rev=ctx_rev,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- install_tool_dependencies=install_tool_dependencies )
- if error_message:
- # We'll only have an error_message if there was a problem installing tool dependencies.
- message += error_message
- status = 'error'
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ installed_changeset_revision=repository.installed_changeset_revision,
+ current_changeset_revision=current_changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
repository.uninstalled = False
repository.deleted = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ message += 'The <b>%s</b> repository has been reinstalled. ' % repository.name
if install_tool_dependencies:
- dependency_str = ' along with tool dependencies'
- if error_message:
- dependency_str += ', but with some errors installing the dependencies'
- else:
- dependency_str = ' without tool dependencies'
- message += 'The <b>%s</b> repository has been reinstalled%s. ' % ( repository.name, dependency_str )
+ message += 'The following tool dependencies are now being installed, please wait...'
+ tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.missing_tool_dependencies ]
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ operation='install',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
status=status ) )
+ @web.json
+ def repository_installation_status_updates( self, trans, id=None, status=None ):
+ # Avoid caching
+ trans.response.headers[ 'Pragma' ] = 'no-cache'
+ trans.response.headers[ 'Expires' ] = '0'
+ # Create new HTML for any that have changed
+ rval = {}
+ if id is not None and status is not None:
+ repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+ if repository.status != status:
+ repository.status = status
+ rval[ id ] = { "status": repository.status,
+ "html_status": unicode( trans.fill_template( "admin/tool_shed_repository/repository_installation_status.mako",
+ repository=repository ),
+ 'utf-8' ) }
+ return rval
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
@@ -811,22 +922,62 @@
repo_files_dir=repo_files_dir,
message=message,
status=status )
+ @web.json
+ def tool_dependency_status_updates( self, trans, ids=None, status_list=None ):
+ # Avoid caching
+ trans.response.headers[ 'Pragma' ] = 'no-cache'
+ trans.response.headers[ 'Expires' ] = '0'
+ # Create new HTML for any that have changed
+ rval = {}
+ if ids is not None and status_list is not None:
+ ids = ids.split( "," )
+ status_list = status_list.split( "," )
+ for id, status in zip( ids, status_list ):
+ tool_dependency = trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
+ if tool_dependency.status != status:
+ rval[ id ] = { "status": tool_dependency.status,
+ "html_status": unicode( trans.fill_template( "admin/tool_shed_repository/tool_dependency_installation_status.mako",
+ tool_dependency=tool_dependency ),
+ 'utf-8' ) }
+ return rval
@web.expose
@web.require_admin
- def uninstall_tool_dependency( self, trans, **kwd ):
+ def uninstall_tool_dependencies( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'repository_id' ] )
- tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
- uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
- if uninstalled:
- message = "The '%s' tool dependency has been uninstalled." % tool_dependency.name
- else:
- message = "Error attempting to uninstall the '%s' tool dependency: %s" % ( tool_dependency.name, error_message )
- status = 'error'
- return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
- repository=repository,
+ tool_dependency_ids = util.listify( params.get( 'tool_dependency_ids', None ) )
+ if not tool_dependency_ids:
+ tool_dependency_ids = util.listify( params.get( 'id', None ) )
+ tool_dependencies = []
+ for tool_dependency_id in tool_dependency_ids:
+ tool_dependency = get_tool_dependency( trans, tool_dependency_id )
+ tool_dependencies.append( tool_dependency )
+ if kwd.get( 'uninstall_tool_dependencies_button', False ):
+ errors = False
+ # Filter tool dependencies to only those that are installed.
+ tool_dependencies_for_uninstallation = []
+ for tool_dependency in tool_dependencies:
+ if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED,
+ trans.model.ToolDependency.installation_status.ERROR ]:
+ tool_dependencies_for_uninstallation.append( tool_dependency )
+ for tool_dependency in tool_dependencies_for_uninstallation:
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if error_message:
+ errors = True
+ message = '%s %s' % ( message, error_message )
+ if errors:
+ message = "Error attempting to uninstall tool dependencies: %s" % message
+ status = 'error'
+ else:
+ message = "These tool dependencies have been uninstalled: %s" % ','.join( td.name for td in tool_dependencies_for_uninstallation )
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_tool_dependencies',
+ tool_dependency_ids=tool_dependency_ids,
+ status=status,
+ message=message ) )
+ return trans.fill_template( '/admin/tool_shed_repository/uninstall_tool_dependencies.mako',
+ tool_dependencies=tool_dependencies,
message=message,
status=status )
@web.expose
@@ -864,11 +1015,14 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ # Create tool_dependency records if necessary.
+ if 'tool_dependencies' in metadata_dict:
+ tool_dependencies = create_tool_dependency_objects( trans.app, repository, repository.changeset_revision )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
- message += "Select <b>Install tool dependencies</b> from the repository's pop-up menu to install tool dependencies."
+ message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
message = "The directory containing the installed repository named '%s' cannot be found. " % name
status = 'error'
@@ -951,3 +1105,6 @@
def get_repository( trans, id ):
"""Get a tool_shed_repository from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+def get_tool_dependency( trans, id ):
+ """Get a tool_dependency from the database via id"""
+ return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py
+++ b/lib/galaxy/web/controllers/tool_runner.py
@@ -128,18 +128,17 @@
else:
tool_id_select_field = None
tool = tools[ 0 ]
- if tool.id == job.tool_id and tool.version == job.tool_version:
+ if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version:
tool_id_version_message = ''
elif tool.id == job.tool_id:
if job.tool_version == None:
# For some reason jobs don't always keep track of the tool version.
tool_id_version_message = ''
else:
+ tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
if len( tools ) > 1:
- tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
else:
- tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
else:
if len( tools ) > 1:
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1229,7 +1229,7 @@
encoded_repo_info_dict = encode( repo_info_dict )
if includes_tool_dependencies:
# Redirect back to local Galaxy to present the option to install tool dependencies.
- url = '%sadmin_toolshed/install_tool_dependencies?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
+ url = '%sadmin_toolshed/confirm_tool_dependency_install?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
else:
# Redirect back to local Galaxy to perform install.
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -10,7 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "ui.core", "jquery.dynatree" )}
- ${common_javascripts(repository.name, repository.repo_files_directory(trans.app))}
+ ${browse_files(repository.name, repository.repo_files_directory(trans.app))}
</%def><br/><br/>
@@ -21,10 +21,8 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
%if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a>
%endif
</div></ul>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- a/templates/admin/tool_shed_repository/browse_tool_dependency.mako
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -10,9 +10,11 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "ui.core", "jquery.dynatree" )}
- ${common_javascripts(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
+ ${browse_files(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
</%def>
+<% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+
<br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="tool_dependency-${tool_dependency.id}-popup" class="menubutton">Repository Actions</a></li>
@@ -21,8 +23,7 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this tool dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a></div></ul>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -1,4 +1,4 @@
-<%def name="common_javascripts(title_text, directory_path)">
+<%def name="browse_files(title_text, directory_path)"><script type="text/javascript">
$(function(){
$("#tree").ajaxComplete(function(event, XMLHttpRequest, ajaxOptions) {
@@ -66,3 +66,131 @@
});
</script></%def>
+
+<%def name="dependency_status_updater()">
+ <script type="text/javascript">
+
+ // Tool dependency status updater - used to update the installation status on the Tool Dependencies grid.
+ // Looks for changes in tool dependency installation status using an async request. Keeps calling itself
+ // (via setTimeout) until dependency installation status is neither 'Installing' nor 'Building'.
+ var tool_dependency_status_updater = function ( dependency_status_list ) {
+ // See if there are any items left to track
+ var empty = true;
+ for ( i in dependency_status_list ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ setTimeout( function() { tool_dependency_status_updater_callback( dependency_status_list ) }, 3000 );
+ }
+ };
+ var tool_dependency_status_updater_callback = function ( dependency_status_list ) {
+ var ids = []
+ var status_list = []
+ $.each( dependency_status_list, function ( id, dependency_status ) {
+ ids.push( id );
+ status_list.push( dependency_status );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='admin_toolshed', action='tool_dependency_status_updates' )}",
+ dataType: "json",
+ data: { ids: ids.join( "," ), status_list: status_list.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var cell1 = $("#ToolDependencyStatus-" + id);
+ cell1.html( val.html_status );
+ dependency_status_list[ id ] = val.status;
+ });
+ tool_dependency_status_updater( dependency_status_list );
+ },
+ error: function() {
+ tool_dependency_status_updater( dependency_status_list );
+ }
+ });
+ };
+ </script>
+</%def>
+
+<%def name="tool_dependency_installation_updater()">
+ <%
+ can_update = False
+ if query.count():
+ # Get the first tool dependency to get to the tool shed repository.
+ tool_dependency = query[0]
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ can_update = tool_shed_repository.tool_dependencies_being_installed or tool_shed_repository.missing_tool_dependencies
+ %>
+ %if can_update:
+ <script type="text/javascript">
+ // Tool dependency installation status updater
+ tool_dependency_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( td.id ), td.status ) for td in query ] ) }});
+ </script>
+ %endif
+</%def>
+
+<%def name="repository_installation_status_updater()">
+ <script type="text/javascript">
+
+ // Tool shed repository status updater - used to update the installation status on the repository_installation.mako template.
+ // Looks for changes in repository installation status using an async request. Keeps calling itself (via setTimeout) until
+ // repository installation status is neither 'cloning', 'cloned' nor 'installing tool dependencies'.
+ var tool_shed_repository_status_updater = function ( repository_status_list ) {
+ // See if there are any items left to track
+ var empty = true;
+ for ( i in repository_status_list ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ setTimeout( function() { tool_shed_repository_status_updater_callback( repository_status_list ) }, 3000 );
+ }
+ };
+ var tool_shed_repository_status_updater_callback = function ( repository_status_list ) {
+ var ids = []
+ var status_list = []
+ $.each( repository_status_list, function ( id, repository_status ) {
+ ids.push( id );
+ status_list.push( repository_status );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='admin_toolshed', action='repository_installation_status_updates' )}",
+ dataType: "json",
+ data: { id: ids[0], status_list: status_list.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var cell1 = $("#RepositoryStatus-" + id);
+ cell1.html( val.html_status );
+ repository_status_list[ id ] = val.status;
+ });
+ tool_shed_repository_status_updater( repository_status_list );
+ },
+ error: function() {
+ tool_shed_repository_status_updater( repository_status_list );
+ }
+ });
+ };
+ </script>
+</%def>
+
+<%def name="repository_installation_updater()">
+ <%
+ can_update = True
+ if tool_shed_repository:
+ can_update = tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
+ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]
+ %>
+ %if can_update:
+ <script type="text/javascript">
+ // Tool shed repository installation status updater
+ repository_installation_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( repository.id ), repository.status ) for repository in query ] ) }});
+ </script>
+ %endif
+</%def>
+
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/confirm_tool_dependency_install.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/confirm_tool_dependency_install.mako
@@ -0,0 +1,86 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+ <p>
+ The tool dependencies listed below can be automatically installed with the repository. Installing them provides significant
+ benefits and Galaxy includes various features to manage them.
+ </p>
+ <p>
+ Each of these dependencies may require their own build requirements (e.g., CMake, g++, etc). Galaxy will not attempt to install
+ these build requirements, so if any are missing from your environment tool dependency installation may partially fail. The
+ repository and all of its contents will be installed in any case.
+ </p>
+ <p>
+ If tool dependency installation fails in any way, you can install the missing build requirements and have Galaxy attempt to install
+ the tool dependencies again using the <b>Install tool dependencies</b> pop-up menu option on the <b>Manage repository</b> page.
+ </p>
+</div>
+
+<div class="toolForm">
+ <div class="toolFormBody">
+ <form name="confirm_tool_dependency_install" id="confirm_tool_dependency_install" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" >
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Install tool dependencies?</label>
+ ${install_tool_dependencies_check_box.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Un-check to skip automatic installation of these tool dependencies.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Type</th>
+ <th>Install directory</th>
+ </tr>
+ %for repository_name, repo_info_tuple in dict_with_tool_dependencies.items():
+ <%
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ %>
+ %for dependency_key, requirements_dict in tool_dependencies.items():
+ <%
+ name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
+ type = requirements_dict[ 'type' ]
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ name,
+ version,
+ repository_owner,
+ repository_name,
+ changeset_revision )
+ readme_text = requirements_dict.get( 'readme', None )
+ %>
+ %if not os.path.exists( install_dir ):
+ <tr>
+ <td>${name}</td>
+ <td>${version}</td>
+ <td>${type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %if readme_text:
+ <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
+ <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
+ %endif
+ %endif
+ %endfor
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="confirm_tool_dependency_install_button" value="Continue"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
--- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -86,7 +86,7 @@
* The repository's installed tool dependencies will be removed from disk.
</div><div class="toolParamHelp" style="clear: both;">
- * Each associated tool dependency record's uninstalled column in the tool_dependency database table will be set to True.
+ * Each associated tool dependency record's status column in the tool_dependency database table will be set to 'Uninstalled'.
</div>
%endif
%if repository.includes_datatypes:
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
+++ /dev/null
@@ -1,109 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-
-<% import os %>
-
-<br/><br/>
-<ul class="manage-table-actions">
- <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
- <div popupmenu="repository-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- %if repository.includes_tools:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
- %endif
- %if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
- </div>
-</ul>
-
-%if message:
- ${render_msg( message, status )}
-%endif
-
-<div class="warningmessage">
- <p>
- Galaxy will attempt to install the missing tool dependencies listed below. Each of these dependencies may require their own build
- requirements (e.g., CMake, g++, etc). Galaxy will not attempt to install these build requirements, so if any are missing from your
- environment tool dependency installation may partially fail. If this happens, you can install the missing build requirements and
- have Galaxy attempt to install the tool dependencies again.
- </p>
-</div>
-<br/>
-<div class="warningmessage">
- <p>
- Installation may take a while. <b>Always wait until a message is displayed in your browser after clicking the <b>Go</b> button below.</b>
- If you get bored, watching your Galaxy server's paster log will help pass the time.
- </p>
- <p>
- Information about the tool dependency installation process will be saved in various files named with a ".log" extension in the directory:
- ${trans.app.config.tool_dependency_dir}/<i>package name</i>/<i>package version</i>/${repository.owner}/${repository.name}/${repository.changeset_revision}
- </p>
-</div>
-<br/>
-
-<div class="toolForm">
- <div class="toolFormBody">
- <form name="install_missing_tool_dependencies" id="install_missing_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), tool_panel_section=tool_panel_section, new_tool_panel_section=new_tool_panel_section, reinstalling=reinstalling )}" method="post" >
- <div style="clear: both"></div>
- <div class="form-row">
- <label>Install missing tool dependencies?</label>
- ${install_tool_dependencies_check_box.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Un-check to skip installation of these missing tool dependencies.
- </div>
- ## Fake the no_changes_check_box value.
- %if no_changes_checked:
- <input type="hidden" id="no_changes" name="no_changes" value="true" checked="checked"><input type="hidden" name="no_changes" value="true">
- %else:
- <input type="hidden" name="no_changes" value="true">
- %endif
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr><td colspan="4" bgcolor="#D8D8D8"><b>Missing tool dependencies</b></td></tr>
- <tr>
- <th>Name</th>
- <th>Version</th>
- <th>Type</th>
- <th>Install directory</th>
- </tr>
- %for dependency_key, requirements_dict in tool_dependencies.items():
- <%
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- install_dir = os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository.owner,
- repository.name,
- repository.changeset_revision )
- readme_text = requirements_dict.get( 'readme', None )
- %>
- %if not os.path.exists( install_dir ):
- <tr>
- <td>${name}</td>
- <td>${version}</td>
- <td>${type}</td>
- <td>${install_dir}</td>
- </tr>
- %if readme_text:
- <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
- <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
- %endif
- %endif
- %endfor
- </table>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="install_missing_tool_dependencies_button" value="Go"/>
- </div>
- </form>
- </div>
-</div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/install_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies.mako
@@ -25,16 +25,7 @@
<div class="toolForm"><div class="toolFormBody">
- <form name="install_tool_dependenceies" id="install_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" >
- <div style="clear: both"></div>
- <div class="form-row">
- <label>Install tool dependencies?</label>
- ${install_tool_dependencies_check_box.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Un-check to skip automatic installation of these tool dependencies.
- </div>
- </div>
- <div style="clear: both"></div>
+ <form name="install_tool_dependencies" id="install_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_tool_dependencies' )}" method="post" >
@@ -44,42 +35,46 @@
<th>Type</th><th>Install directory</th></tr>
- %for repository_name, repo_info_tuple in dict_with_tool_dependencies.items():
+ <% tool_shed_repository = None %>
+ %for tool_dependency in tool_dependencies:
+ <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/><%
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ readme_text = None
+ if tool_shed_repository is None:
+ tool_shed_repository = tool_dependency.tool_shed_repository
+ metadata = tool_shed_repository.metadata
+ tool_dependencies_dict = metadata[ 'tool_dependencies' ]
+ for key, requirements_dict in tool_dependencies_dict.items():
+ key_items = key.split( '/' )
+ key_name = key_items[ 0 ]
+ key_version = key_items[ 1 ]
+ if key_name == tool_dependency.name and key_version == tool_dependency.version:
+ readme_text = requirements_dict.get( 'readme', None )
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ tool_dependency.name,
+ tool_dependency.version,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision )
%>
- %for dependency_key, requirements_dict in tool_dependencies.items():
- <%
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- install_dir = os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository_owner,
- repository_name,
- changeset_revision )
- readme_text = requirements_dict.get( 'readme', None )
- %>
- %if not os.path.exists( install_dir ):
- <tr>
- <td>${name}</td>
- <td>${version}</td>
- <td>${type}</td>
- <td>${install_dir}</td>
- </tr>
- %if readme_text:
- <tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr>
- <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
- %endif
+ %if not os.path.exists( install_dir ):
+ <tr>
+ <td>${tool_dependency.name}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %if readme_text:
+ <tr><td colspan="4" bgcolor="#FFFFCC">${tool_dependency.name} ${tool_dependency.version} requirements and installation information</td></tr>
+ <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
- %endfor
+ %endif
%endfor
</table><div style="clear: both"></div></div><div class="form-row">
- <input type="submit" name="install_tool_dependenceies_button" value="Continue"/>
+ <input type="submit" name="install_tool_dependencies_button" value="Install"/></div></form></div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -12,10 +12,8 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
%endif
%if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=tool_dependency_ids )}">Manage tool dependencies</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div>
@@ -91,21 +89,15 @@
<td><b>version</b></td><td><b>type</b></td></tr>
- %for index, missing_dependency_tup in enumerate( missing_tool_dependencies ):
- <% name, version, type = missing_dependency_tup %>
+ %for tool_dependency in missing_tool_dependencies:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="missing_dependency-${index}-popup">
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
- ${name}
- </a>
- </div>
- <div popupmenu="missing_dependency-${index}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
- </div>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.id ) )}">
+ ${tool_dependency.name}
+ </a></td>
- <td>${version}</td>
- <td>${type}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td></tr>
%endfor
</table>
@@ -131,14 +123,9 @@
%for installed_tool_dependency in installed_tool_dependencies:
<tr><td>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${installed_tool_dependency.id}-popup">
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${installed_tool_dependency.name}
- </a>
- </div>
- <div popupmenu="dependency-${installed_tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
- </div>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${installed_tool_dependency.name}
+ </a></td><td>${installed_tool_dependency.version}</td><td>${installed_tool_dependency.type}</td>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -13,9 +13,6 @@
%if repository.includes_tools:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
%endif
- %if repository.missing_tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
- %endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
@@ -34,25 +31,25 @@
name = tool_dependency.name
version = tool_dependency.version
type = tool_dependency.type
- uninstalled = tool_dependency.uninstalled
+ installed = tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED
install_dir = tool_dependency.installation_directory( trans.app )
%><tr><td bgcolor="#D8D8D8"><div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${tool_dependency.id}-popup">
- %if uninstalled:
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ %if not installed:
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='browse', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}"><b>Name</b></a><div popupmenu="dependency-${tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='install', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}">Install this tool dependency</a></div>
%else:
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='browse', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}"><b>Name</b></a><div popupmenu="dependency-${tool_dependency.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='uninstall', tool_dependency_id=trans.security.encode_id( tool_dependency.id ) )}">Uninstall this tool dependency</a></div>
%endif
</div>
@@ -64,7 +61,7 @@
<tr><th>Install directory</th><td>
- %if uninstalled:
+ %if not installed:
This dependency is not currently installed
%else:
<a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
@@ -73,7 +70,7 @@
%endif
</td></tr>
- <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
+ <tr><th>Installed</th><td>${not installed}</td></tr>
%endfor
</table><div style="clear: both"></div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -8,8 +8,9 @@
<div class="toolForm"><div class="toolFormTitle">Choose the tool panel section to contain the installed tools (optional)</div><div class="toolFormBody">
- %if repository.includes_tool_dependencies:
- <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), reinstalling=True )}" method="post" >
+ %if repository.tool_dependencies:
+ <% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+ <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='install', tool_dependency_ids=tool_dependency_ids )}" method="post" >
%else:
<form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
%endif
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -78,6 +78,7 @@
<div class="toolForm"><div class="toolFormTitle">Repository README file (may contain important installation or license information)</div><div class="toolFormBody">
+ <input type="hidden" name="readme_text" value="${readme_text}"/><div class="form-row"><pre>${readme_text}</pre></div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/tool_dependencies_grid.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependencies_grid.mako
@@ -0,0 +1,8 @@
+<%inherit file="/grid_base.mako"/>
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${dependency_status_updater()}
+ ${tool_dependency_installation_updater()}
+</%def>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
@@ -0,0 +1,13 @@
+<%def name="render_tool_dependency_status( tool_dependency )">
+ <%
+ if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLING:
+ bgcolor = trans.model.ToolDependency.states.INSTALLING
+ rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">' % ( bgcolor, trans.security.encode_id( tool_dependency.id ) )
+ rval += '%s</div>' % tool_dependency.status
+ else:
+ rval = tool_dependency.status
+ %>
+ ${rval}
+</%def>
+
+${render_tool_dependency_status( tool_dependency )}
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
@@ -0,0 +1,52 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Uninstall tool dependencies</div>
+ <div class="toolFormBody">
+ <form name="uninstall_tool_dependenceies" id="uninstall_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependencies' )}" method="post" >
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Type</th>
+ <th>Install directory</th>
+ </tr>
+ %for tool_dependency in tool_dependencies:
+ <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/>
+ <%
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ tool_dependency.name,
+ tool_dependency.version,
+ tool_dependency.tool_shed_repository.owner,
+ tool_dependency.tool_shed_repository.name,
+ tool_dependency.tool_shed_repository.installed_changeset_revision )
+ %>
+ %if os.path.exists( install_dir ):
+ <tr>
+ <td>${tool_dependency.name}</td>
+ <td>${tool_dependency.version}</td>
+ <td>${tool_dependency.type}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %endif
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="uninstall_tool_dependencies_button" value="Uninstall"/>
+ <div class="toolParamHelp" style="clear: both;">
+ Click to uninstall the tool dependencies listed above.
+ </div>
+ </div>
+ </form>
+ </div>
+</div>
diff -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 -r db2bf800496478a5ea041480c3c514c2620e28ae templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -283,7 +283,7 @@
%>
%if tool_id_version_message:
- ${render_msg( tool_id_version_message, 'error' )}
+ ${render_msg( tool_id_version_message, 'warning' )}
%endif
<div class="toolForm" id="${tool.id}">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.
1
0
commit/galaxy-central: dannon: Remove Availability Zone placement from cloud_launch instance kickoff. This prevents the failure where an AZ is temporarily unavailable by allowing Amazon to auto-place.
by Bitbucket 21 Jun '12
by Bitbucket 21 Jun '12
21 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a0a4f15fe095/
changeset: a0a4f15fe095
user: dannon
date: 2012-06-21 20:09:17
summary: Remove Availability Zone placement from cloud_launch instance kickoff. This prevents the failure where an AZ is temporarily unavailable by allowing Amazon to auto-place.
affected #: 1 file
diff -r f6a710440c0500fb09e980300419b53b2cda6088 -r a0a4f15fe0958c5ff2658c1695c63023b9cf6d39 lib/galaxy/web/controllers/cloudlaunch.py
--- a/lib/galaxy/web/controllers/cloudlaunch.py
+++ b/lib/galaxy/web/controllers/cloudlaunch.py
@@ -177,7 +177,6 @@
instance_type = user_provided_data['instance_type']
# Remove 'instance_type' key from the dict before creating user data
del user_provided_data['instance_type']
- placement = _find_placement(ec2_conn, instance_type)
ud = "\n".join(['%s: %s' % (key, value) for key, value in user_provided_data.iteritems() if key != 'kp_material'])
try:
rs = ec2_conn.run_instances(image_id=image_id,
@@ -186,8 +185,7 @@
security_groups=security_groups,
user_data=ud,
kernel_id=kernel_id,
- ramdisk_id=ramdisk_id,
- placement=placement)
+ ramdisk_id=ramdisk_id)
except EC2ResponseError, e:
log.error("Problem starting an instance: %s\n%s" % (e, e.body))
if rs:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/53d43a29b41b/
changeset: 53d43a29b41b
user: fbacall
date: 2012-06-19 15:36:44
summary: Included workflow SVG representation in myexperiment export
affected #: 2 files
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 53d43a29b41b4f5327deb42800c0f64d4f129a4a lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -505,107 +505,8 @@
stored = self.get_stored_workflow( trans, id, check_ownership=True )
session = trans.sa_session
- workflow = stored.latest_workflow
- data = []
-
- canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
- text = svgfig.SVG("g")
- connectors = svgfig.SVG("g")
- boxes = svgfig.SVG("g")
- svgfig.Text.defaults["font-size"] = "10px"
-
- in_pos = {}
- out_pos = {}
- margin = 5
- line_px = 16 # how much spacing between input/outputs
- widths = {} # store px width for boxes of each step
- max_width, max_x, max_y = 0, 0, 0
-
- for step in workflow.steps:
- # Load from database representation
- module = module_factory.from_workflow_step( trans, step )
-
- # Pack attributes into plain dictionary
- step_dict = {
- 'id': step.order_index,
- 'data_inputs': module.get_data_inputs(),
- 'data_outputs': module.get_data_outputs(),
- 'position': step.position
- }
-
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = \
- dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict['input_connections'] = input_conn_dict
-
- data.append(step_dict)
-
- x, y = step.position['left'], step.position['top']
- count = 0
-
- max_len = len(module.get_name()) * 1.5
- text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
-
- y += 45
- for di in module.get_data_inputs():
- cur_y = y+count*line_px
- if step.order_index not in in_pos:
- in_pos[step.order_index] = {}
- in_pos[step.order_index][di['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
- count += 1
- max_len = max(max_len, len(di['label']))
-
-
- if len(module.get_data_inputs()) > 0:
- y += 15
-
- for do in module.get_data_outputs():
- cur_y = y+count*line_px
- if step.order_index not in out_pos:
- out_pos[step.order_index] = {}
- out_pos[step.order_index][do['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
- count += 1
- max_len = max(max_len, len(do['name']))
-
- widths[step.order_index] = max_len*5.5
- max_x = max(max_x, step.position['left'])
- max_y = max(max_y, step.position['top'])
- max_width = max(max_width, widths[step.order_index])
-
- for step_dict in data:
- width = widths[step_dict['id']]
- x, y = step_dict['position']['left'], step_dict['position']['top']
- boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
- box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
-
- # Draw separator line
- if len(step_dict['data_inputs']) > 0:
- box_height += 15
- sep_y = y + len(step_dict['data_inputs']) * line_px + 40
- text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
-
- # input/output box
- boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
-
- for conn, output_dict in step_dict['input_connections'].iteritems():
- in_coords = in_pos[step_dict['id']][conn]
- out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
- adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
- text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
- connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
-
- canvas.append(connectors)
- canvas.append(boxes)
- canvas.append(text)
- width, height = (max_x + max_width + 50), max_y + 300
- canvas['width'] = "%s px" % width
- canvas['height'] = "%s px" % height
- canvas['viewBox'] = "0 0 %s %s" % (width, height)
trans.response.set_content_type("image/svg+xml")
- return canvas.standalone_xml()
+ return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@web.expose
@@ -1056,7 +957,8 @@
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
workflow_name=workflow_dict['name'], \
workflow_description=workflow_dict['annotation'], \
- workflow_content=workflow_content
+ workflow_content=workflow_content, \
+ workflow_svg=self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicode( request_raw.strip(), 'utf-8' )
@@ -1929,6 +1831,110 @@
trans.sa_session.flush()
return stored, missing_tool_tups
+
+ def _workflow_to_svg_canvas( self, trans, stored ):
+
+ workflow = stored.latest_workflow
+ data = []
+
+ canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
+ text = svgfig.SVG("g")
+ connectors = svgfig.SVG("g")
+ boxes = svgfig.SVG("g")
+ svgfig.Text.defaults["font-size"] = "10px"
+
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ line_px = 16 # how much spacing between input/outputs
+ widths = {} # store px width for boxes of each step
+ max_width, max_x, max_y = 0, 0, 0
+
+ for step in workflow.steps:
+ # Load from database representation
+ module = module_factory.from_workflow_step( trans, step )
+
+ # Pack attributes into plain dictionary
+ step_dict = {
+ 'id': step.order_index,
+ 'data_inputs': module.get_data_inputs(),
+ 'data_outputs': module.get_data_outputs(),
+ 'position': step.position
+ }
+
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = \
+ dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict['input_connections'] = input_conn_dict
+
+ data.append(step_dict)
+
+ x, y = step.position['left'], step.position['top']
+ count = 0
+
+ max_len = len(module.get_name()) * 1.5
+ text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
+
+ y += 45
+ for di in module.get_data_inputs():
+ cur_y = y+count*line_px
+ if step.order_index not in in_pos:
+ in_pos[step.order_index] = {}
+ in_pos[step.order_index][di['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
+ count += 1
+ max_len = max(max_len, len(di['label']))
+
+
+ if len(module.get_data_inputs()) > 0:
+ y += 15
+
+ for do in module.get_data_outputs():
+ cur_y = y+count*line_px
+ if step.order_index not in out_pos:
+ out_pos[step.order_index] = {}
+ out_pos[step.order_index][do['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
+ count += 1
+ max_len = max(max_len, len(do['name']))
+
+ widths[step.order_index] = max_len*5.5
+ max_x = max(max_x, step.position['left'])
+ max_y = max(max_y, step.position['top'])
+ max_width = max(max_width, widths[step.order_index])
+
+ for step_dict in data:
+ width = widths[step_dict['id']]
+ x, y = step_dict['position']['left'], step_dict['position']['top']
+ boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
+ box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
+
+ # Draw separator line
+ if len(step_dict['data_inputs']) > 0:
+ box_height += 15
+ sep_y = y + len(step_dict['data_inputs']) * line_px + 40
+ text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
+
+ # input/output box
+ boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
+
+ for conn, output_dict in step_dict['input_connections'].iteritems():
+ in_coords = in_pos[step_dict['id']][conn]
+ out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
+ adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
+ text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
+ connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
+
+ canvas.append(connectors)
+ canvas.append(boxes)
+ canvas.append(text)
+ width, height = (max_x + max_width + 50), max_y + 300
+ canvas['width'] = "%s px" % width
+ canvas['height'] = "%s px" % height
+ canvas['viewBox'] = "0 0 %s %s" % (width, height)
+
+ return canvas
## ---- Utility methods -------------------------------------------------------
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 53d43a29b41b4f5327deb42800c0f64d4f129a4a templates/workflow/myexp_export.mako
--- a/templates/workflow/myexp_export.mako
+++ b/templates/workflow/myexp_export.mako
@@ -15,6 +15,7 @@
<content encoding="base64" type="binary">
${textwrap.fill( base64.b64encode( workflow_content ), 64 )}
</content>
- <preview encoding="base64" type="binary">
- </preview>
-</workflow>
\ No newline at end of file
+ <svg encoding="base64">
+ ${textwrap.fill( base64.b64encode( workflow_svg ), 64 )}
+ </svg>
+</workflow>
https://bitbucket.org/galaxy/galaxy-central/changeset/f6a710440c05/
changeset: f6a710440c05
user: jgoecks
date: 2012-06-20 20:59:37
summary: Merged in fbacall/galaxy-central-myexp-integration (pull request #49)
affected #: 2 files
diff -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 -r f6a710440c0500fb09e980300419b53b2cda6088 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -505,107 +505,8 @@
stored = self.get_stored_workflow( trans, id, check_ownership=True )
session = trans.sa_session
- workflow = stored.latest_workflow
- data = []
-
- canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
- text = svgfig.SVG("g")
- connectors = svgfig.SVG("g")
- boxes = svgfig.SVG("g")
- svgfig.Text.defaults["font-size"] = "10px"
-
- in_pos = {}
- out_pos = {}
- margin = 5
- line_px = 16 # how much spacing between input/outputs
- widths = {} # store px width for boxes of each step
- max_width, max_x, max_y = 0, 0, 0
-
- for step in workflow.steps:
- # Load from database representation
- module = module_factory.from_workflow_step( trans, step )
-
- # Pack attributes into plain dictionary
- step_dict = {
- 'id': step.order_index,
- 'data_inputs': module.get_data_inputs(),
- 'data_outputs': module.get_data_outputs(),
- 'position': step.position
- }
-
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = \
- dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict['input_connections'] = input_conn_dict
-
- data.append(step_dict)
-
- x, y = step.position['left'], step.position['top']
- count = 0
-
- max_len = len(module.get_name()) * 1.5
- text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
-
- y += 45
- for di in module.get_data_inputs():
- cur_y = y+count*line_px
- if step.order_index not in in_pos:
- in_pos[step.order_index] = {}
- in_pos[step.order_index][di['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
- count += 1
- max_len = max(max_len, len(di['label']))
-
-
- if len(module.get_data_inputs()) > 0:
- y += 15
-
- for do in module.get_data_outputs():
- cur_y = y+count*line_px
- if step.order_index not in out_pos:
- out_pos[step.order_index] = {}
- out_pos[step.order_index][do['name']] = (x, cur_y)
- text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
- count += 1
- max_len = max(max_len, len(do['name']))
-
- widths[step.order_index] = max_len*5.5
- max_x = max(max_x, step.position['left'])
- max_y = max(max_y, step.position['top'])
- max_width = max(max_width, widths[step.order_index])
-
- for step_dict in data:
- width = widths[step_dict['id']]
- x, y = step_dict['position']['left'], step_dict['position']['top']
- boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
- box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
-
- # Draw separator line
- if len(step_dict['data_inputs']) > 0:
- box_height += 15
- sep_y = y + len(step_dict['data_inputs']) * line_px + 40
- text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
-
- # input/output box
- boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
-
- for conn, output_dict in step_dict['input_connections'].iteritems():
- in_coords = in_pos[step_dict['id']][conn]
- out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
- adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
- text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
- connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
-
- canvas.append(connectors)
- canvas.append(boxes)
- canvas.append(text)
- width, height = (max_x + max_width + 50), max_y + 300
- canvas['width'] = "%s px" % width
- canvas['height'] = "%s px" % height
- canvas['viewBox'] = "0 0 %s %s" % (width, height)
trans.response.set_content_type("image/svg+xml")
- return canvas.standalone_xml()
+ return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@web.expose
@@ -1056,7 +957,8 @@
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
workflow_name=workflow_dict['name'], \
workflow_description=workflow_dict['annotation'], \
- workflow_content=workflow_content
+ workflow_content=workflow_content, \
+ workflow_svg=self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicode( request_raw.strip(), 'utf-8' )
@@ -1929,6 +1831,110 @@
trans.sa_session.flush()
return stored, missing_tool_tups
+
+ def _workflow_to_svg_canvas( self, trans, stored ):
+
+ workflow = stored.latest_workflow
+ data = []
+
+ canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
+ text = svgfig.SVG("g")
+ connectors = svgfig.SVG("g")
+ boxes = svgfig.SVG("g")
+ svgfig.Text.defaults["font-size"] = "10px"
+
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ line_px = 16 # how much spacing between input/outputs
+ widths = {} # store px width for boxes of each step
+ max_width, max_x, max_y = 0, 0, 0
+
+ for step in workflow.steps:
+ # Load from database representation
+ module = module_factory.from_workflow_step( trans, step )
+
+ # Pack attributes into plain dictionary
+ step_dict = {
+ 'id': step.order_index,
+ 'data_inputs': module.get_data_inputs(),
+ 'data_outputs': module.get_data_outputs(),
+ 'position': step.position
+ }
+
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = \
+ dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict['input_connections'] = input_conn_dict
+
+ data.append(step_dict)
+
+ x, y = step.position['left'], step.position['top']
+ count = 0
+
+ max_len = len(module.get_name()) * 1.5
+ text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
+
+ y += 45
+ for di in module.get_data_inputs():
+ cur_y = y+count*line_px
+ if step.order_index not in in_pos:
+ in_pos[step.order_index] = {}
+ in_pos[step.order_index][di['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
+ count += 1
+ max_len = max(max_len, len(di['label']))
+
+
+ if len(module.get_data_inputs()) > 0:
+ y += 15
+
+ for do in module.get_data_outputs():
+ cur_y = y+count*line_px
+ if step.order_index not in out_pos:
+ out_pos[step.order_index] = {}
+ out_pos[step.order_index][do['name']] = (x, cur_y)
+ text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
+ count += 1
+ max_len = max(max_len, len(do['name']))
+
+ widths[step.order_index] = max_len*5.5
+ max_x = max(max_x, step.position['left'])
+ max_y = max(max_y, step.position['top'])
+ max_width = max(max_width, widths[step.order_index])
+
+ for step_dict in data:
+ width = widths[step_dict['id']]
+ x, y = step_dict['position']['left'], step_dict['position']['top']
+ boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
+ box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
+
+ # Draw separator line
+ if len(step_dict['data_inputs']) > 0:
+ box_height += 15
+ sep_y = y + len(step_dict['data_inputs']) * line_px + 40
+ text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
+
+ # input/output box
+ boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
+
+ for conn, output_dict in step_dict['input_connections'].iteritems():
+ in_coords = in_pos[step_dict['id']][conn]
+ out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
+ adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
+ text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
+ connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
+
+ canvas.append(connectors)
+ canvas.append(boxes)
+ canvas.append(text)
+ width, height = (max_x + max_width + 50), max_y + 300
+ canvas['width'] = "%s px" % width
+ canvas['height'] = "%s px" % height
+ canvas['viewBox'] = "0 0 %s %s" % (width, height)
+
+ return canvas
## ---- Utility methods -------------------------------------------------------
diff -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 -r f6a710440c0500fb09e980300419b53b2cda6088 templates/workflow/myexp_export.mako
--- a/templates/workflow/myexp_export.mako
+++ b/templates/workflow/myexp_export.mako
@@ -15,6 +15,7 @@
<content encoding="base64" type="binary">
${textwrap.fill( base64.b64encode( workflow_content ), 64 )}
</content>
- <preview encoding="base64" type="binary">
- </preview>
-</workflow>
\ No newline at end of file
+ <svg encoding="base64">
+ ${textwrap.fill( base64.b64encode( workflow_svg ), 64 )}
+ </svg>
+</workflow>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.
1
0