galaxy-commits
27 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7073d786cad0/
changeset: 7073d786cad0
user: greg
date: 2012-12-27 22:00:16
summary: Add the ability to view an SVG image of a workflow contained in a tool shed repository installed into a Galaxy instance. This is the same feature that has been available in the tool shed for some time.
affected #: 14 files
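The new tool shed controller methods introduced below (view_workflow and generate_workflow_image in lib/galaxy/webapps/community/controllers/repository.py) serve the workflow page and the raw SVG. As a rough sketch that is not part of this changeset, a client could build the corresponding URLs as follows; the host name and the two identifier values are placeholders, and the workflow name is assumed to have already been encoded with encoding_util.tool_shed_encode():

import urllib

tool_shed_url = 'http://toolshed.example.org'    # placeholder host
repository_metadata_id = 'f2db41e1fa331b3e'      # placeholder encoded id
workflow_name = 'aHlwb3RoZXRpY2Fs'               # placeholder encoded workflow name

params = urllib.urlencode( dict( repository_metadata_id=repository_metadata_id,
                                 workflow_name=workflow_name ) )
# Page that embeds the image (renders view_workflow.mako):
view_url = '%s/repository/view_workflow?%s' % ( tool_shed_url, params )
# Raw image/svg+xml response produced by workflow_util.generate_workflow_image():
image_url = '%s/repository/generate_workflow_image?%s' % ( tool_shed_url, params )
print view_url
print image_url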
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,14 +1,14 @@
import os, shutil, tempfile, logging, string, threading, urllib2
from galaxy import util
from galaxy.tools import parameters
-from galaxy.util import inflector
-from galaxy.util import json
+from galaxy.util import inflector, json
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.webapps.community.util import container_util
from galaxy.datatypes import checkers
from galaxy.model.orm import and_
from galaxy.tools.parameters import dynamic_options
+from galaxy.tool_shed import encoding_util
from galaxy import eggs
import pkg_resources
@@ -176,7 +176,12 @@
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Workflows container.
if workflows:
- folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans,
+ folder_id=folder_id,
+ workflows=workflows,
+ repository_metadata_id=None,
+ repository_id=repository_id,
+ label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
@@ -252,7 +257,12 @@
# Workflows container.
if metadata and 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans,
+ folder_id=folder_id,
+ workflows=workflows,
+ repository_metadata_id=repository_metadata.id,
+ repository_id=None,
+ label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
@@ -400,6 +410,25 @@
# tag for any tool in the repository.
break
return can_generate_dependency_metadata
+def can_use_tool_config_disk_file( trans, repository, repo, file_path, changeset_revision ):
+ """
+ Determine if repository's tool config file on disk can be used. This method is restricted to tool config files since, with the
+ exception of tool config files, multiple files with the same name will likely be in various directories in the repository and we're
+ comparing file names only (not relative paths).
+ """
+ if not file_path or not os.path.exists( file_path ):
+ # The file no longer exists on disk, so it must have been deleted at some previous point in the change log.
+ return False
+ if changeset_revision == repository.tip( trans.app ):
+ return True
+ file_name = strip_path( file_path )
+ latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
+ can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
+ try:
+ os.unlink( latest_version_of_file )
+ except:
+ pass
+ return can_use_disk_file
def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
@@ -1339,6 +1368,16 @@
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
+def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
+ """Return the absolute path to a specified disk file containe in a repository."""
+ stripped_file_name = strip_path( file_name )
+ file_path = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == stripped_file_name:
+ return os.path.abspath( os.path.join( root, name ) )
+ return file_path
def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
"""Retrieve a specified changectx from a repository"""
for changeset in repo.changelog:
@@ -1487,14 +1526,14 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
ctx_rev = get_ctx_rev( tool_shed_url, name, owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
- repository = create_or_update_tool_shed_repository( app=self.app,
+ repository = create_or_update_tool_shed_repository( app=trans.app,
name=name,
description=None,
installed_changeset_revision=changeset_revision,
ctx_rev=ctx_rev,
repository_clone_url=repository_clone_url,
metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
+ status=trans.model.ToolShedRepository.installation_status.NEW,
current_changeset_revision=None,
owner=sowner,
dist_to_shed=False )
@@ -1712,6 +1751,9 @@
elif all_metadata_records:
return all_metadata_records[ 0 ]
return None
+def get_repository_metadata_by_id( trans, id ):
+ """Get repository metadata from the database"""
+ return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
def get_repository_metadata_by_repository_id_changset_revision( trans, id, changeset_revision ):
"""Get a specified metadata record for a specified repository."""
return trans.sa_session.query( trans.model.RepositoryMetadata ) \
@@ -1822,6 +1864,10 @@
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
return tool_path
+def get_tool_shed_repository_by_id( trans, repository_id ):
+ return trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.id == trans.security.decode_id( repository_id ) ) \
+ .first()
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
@@ -2110,6 +2156,46 @@
description = repository_dependencies_dict.get( 'description', None )
all_repository_dependencies[ 'description' ] = description
return all_repository_dependencies
+def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
+ """
+ Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
+ is a valid (downloadable) changset revision. The tool config will be located in the repository manifest between the received valid changeset
+ revision and the first changeset revision in the repository, searching backwards.
+ """
+ original_tool_data_path = trans.app.config.tool_data_path
+ repository = get_repository_in_tool_shed( trans, repository_id )
+ repo_files_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_files_dir )
+ message = ''
+ tool = None
+ can_use_disk_file = False
+ tool_config_filepath = get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
+ work_dir = tempfile.mkdtemp()
+ can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
+ if can_use_disk_file:
+ trans.app.config.tool_data_path = work_dir
+ tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
+ if tool is not None:
+ invalid_files_and_errors_tups = check_tool_input_params( trans.app,
+ repo_files_dir,
+ tool_config_filename,
+ tool,
+ sample_files )
+ if invalid_files_and_errors_tups:
+ message2 = generate_message_for_invalid_tools( trans,
+ invalid_files_and_errors_tups,
+ repository,
+ metadata_dict=None,
+ as_html=True,
+ displaying_invalid_tool=True )
+ message = concat_messages( message, message2 )
+ else:
+ tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
+ remove_dir( work_dir )
+ trans.app.config.tool_data_path = original_tool_data_path
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return repository, tool, message
def load_tool_from_config( app, full_path ):
try:
tool = app.toolbox.load_tool( full_path )
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -533,7 +533,7 @@
# The received id is a RepositoryMetadata object id, so we need to get the
# associated Repository and redirect to view_or_manage_repository with the
# changeset_revision.
- repository_metadata = common.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, kwd[ 'id' ] )
repository = repository_metadata.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
@@ -615,7 +615,7 @@
ids = util.listify( id )
count = 0
for repository_metadata_id in ids:
- repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
count += 1
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -113,25 +113,6 @@
repository_metadata.tool_versions = tool_versions_dict
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
-def can_use_tool_config_disk_file( trans, repository, repo, file_path, changeset_revision ):
- """
- Determine if repository's tool config file on disk can be used. This method is restricted to tool config files since, with the
- exception of tool config files, multiple files with the same name will likely be in various directories in the repository and we're
- comparing file names only (not relative paths).
- """
- if not file_path or not os.path.exists( file_path ):
- # The file no longer exists on disk, so it must have been deleted at some previous point in the change log.
- return False
- if changeset_revision == repository.tip( trans.app ):
- return True
- file_name = suc.strip_path( file_path )
- latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
- can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
- try:
- os.unlink( latest_version_of_file )
- except:
- pass
- return can_use_disk_file
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -155,15 +136,6 @@
if user_email in admin_users:
return True
return False
-def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
- stripped_file_name = suc.strip_path( file_name )
- file_path = None
- for root, dirs, files in os.walk( repo_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == stripped_file_name:
- return os.path.abspath( os.path.join( root, name ) )
- return file_path
def get_category( trans, id ):
"""Get a category from the database"""
return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
@@ -249,9 +221,6 @@
def get_repository_by_name( trans, name ):
"""Get a repository from the database via name"""
return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
-def get_repository_metadata_by_id( trans, id ):
- """Get repository metadata from the database"""
- return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
repository_metadata_revisions = []
metadata_changeset_revision_hashes = []
@@ -425,46 +394,6 @@
if previous_changeset_revision in reviewed_revision_hashes:
return True
return False
-def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
- """
- Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
- is a valid (downloadable) changset revision. The tool config will be located in the repository manifest between the received valid changeset
- revision and the first changeset revision in the repository, searching backwards.
- """
- original_tool_data_path = trans.app.config.tool_data_path
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
- message = ''
- tool = None
- can_use_disk_file = False
- tool_config_filepath = get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
- work_dir = tempfile.mkdtemp()
- can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
- if can_use_disk_file:
- trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
- if tool is not None:
- invalid_files_and_errors_tups = suc.check_tool_input_params( trans.app,
- repo_files_dir,
- tool_config_filename,
- tool,
- sample_files )
- if invalid_files_and_errors_tups:
- message2 = suc.generate_message_for_invalid_tools( trans,
- invalid_files_and_errors_tups,
- repository,
- metadata_dict=None,
- as_html=True,
- displaying_invalid_tool=True )
- message = suc.concat_messages( message, message2 )
- else:
- tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
- suc.remove_dir( work_dir )
- trans.app.config.tool_data_path = original_tool_data_path
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
- return repository, tool, message
def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
"""
Compare the last saved metadata for each repository dependency in the repository with the new
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1,18 +1,20 @@
-import os, logging, tempfile, shutil, ConfigParser
+import os, logging, re, tempfile, shutil, ConfigParser
from time import gmtime, strftime
from datetime import date, datetime
-from galaxy import util
+from galaxy import util, web
from galaxy.util.odict import odict
-from galaxy.web.base.controller import *
-from galaxy.web.form_builder import CheckboxField
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.form_builder import CheckboxField, SelectField, build_select_field
from galaxy.webapps.community import model
from galaxy.webapps.community.model import directory_hash_id
-from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.util.json import from_json_string, to_json_string
-from galaxy.model.orm import and_
+from galaxy.web.framework.helpers import grids
+from galaxy.util import json
+from galaxy.model.orm import and_, or_
import galaxy.util.shed_util_common as suc
from galaxy.tool_shed import encoding_util
+from galaxy.webapps.community.util import workflow_util
import common
+import galaxy.tools
from galaxy import eggs
eggs.require('mercurial')
@@ -157,7 +159,7 @@
model.User.table.c.email == column_filter ) )
class EmailAlertsColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
- if trans.user and repository.email_alerts and trans.user.email in from_json_string( repository.email_alerts ):
+ if trans.user and repository.email_alerts and trans.user.email in json.from_json_string( repository.email_alerts ):
return 'yes'
return ''
class DeprecatedColumn( grids.TextColumn ):
@@ -833,7 +835,7 @@
# Start building up the url to redirect back to the calling Galaxy instance.
url = suc.url_join( galaxy_url,
'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
+ ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
if from_update_manager:
@@ -1020,7 +1022,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository, tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
if message:
status = 'error'
tool_state = self.__new_state( trans )
@@ -1087,7 +1089,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1104,7 +1106,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd[ 'repository_ids' ] = encoded_repository_ids
@@ -1172,7 +1174,7 @@
is_admin = trans.user_is_admin()
if operation == "view_or_manage_repository":
# The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
@@ -1189,7 +1191,7 @@
encoded_repository_ids = []
changeset_revisions = []
for repository_metadata_id in util.listify( item_id ):
- repository_metadata = common.get_repository_metadata_by_id( trans, item_id )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, item_id )
encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
changeset_revisions.append( repository_metadata.changeset_revision )
new_kwd = {}
@@ -1245,6 +1247,10 @@
message=message,
status=status )
@web.expose
+ def generate_workflow_image( self, trans, workflow_name, repository_metadata_id=None ):
+ """Return an svg image representation of a workflow dictionary created when the workflow was exported."""
+ return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=repository_metadata_id, repository_id=None )
+ @web.expose
def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
params = util.Params( kwd )
@@ -1355,7 +1361,7 @@
repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None,
@@ -1418,7 +1424,7 @@
encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
changeset_revisions.append( changeset_revision )
if encoded_repository_ids and changeset_revisions:
- repo_info_dict = from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
+ repo_info_dict = json.from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
else:
repo_info_dict = {}
return repo_info_dict
@@ -1465,7 +1471,7 @@
if current_changeset_revision == changeset_revision:
break
if tool_version_dicts:
- return to_json_string( tool_version_dicts )
+ return json.to_json_string( tool_version_dicts )
return ''
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
@@ -1595,14 +1601,14 @@
# Redirect back to local Galaxy to perform install.
url = suc.url_join( galaxy_url,
'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
+ ( web.url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
- repository, tool, error_message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, error_message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
is_malicious = common.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
@@ -1701,7 +1707,7 @@
alerts_checked = CheckboxField.is_checked( alerts )
category_ids = util.listify( params.get( 'category_id', '' ) )
if repository.email_alerts:
- email_alerts = from_json_string( repository.email_alerts )
+ email_alerts = json.from_json_string( repository.email_alerts )
else:
email_alerts = []
allow_push = params.get( 'allow_push', '' )
@@ -1776,12 +1782,12 @@
if alerts_checked:
if user.email not in email_alerts:
email_alerts.append( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
flush_needed = True
else:
if user.email in email_alerts:
email_alerts.remove( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
flush_needed = True
if flush_needed:
trans.sa_session.add( repository )
@@ -1830,7 +1836,7 @@
repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None )
@@ -1913,7 +1919,7 @@
Only inputs on the first page will be initialized unless `all_pages` is
True, in which case all inputs regardless of page are initialized.
"""
- state = DefaultToolState()
+ state = galaxy.tools.DefaultToolState()
state.inputs = {}
return state
@web.json
@@ -1939,7 +1945,7 @@
repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None )
@@ -2257,18 +2263,18 @@
for repository_id in repository_ids:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
if repository.email_alerts:
- email_alerts = from_json_string( repository.email_alerts )
+ email_alerts = json.from_json_string( repository.email_alerts )
else:
email_alerts = []
if user.email in email_alerts:
email_alerts.remove( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
trans.sa_session.add( repository )
flush_needed = True
total_alerts_removed += 1
else:
email_alerts.append( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
trans.sa_session.add( repository )
flush_needed = True
total_alerts_added += 1
@@ -2424,7 +2430,7 @@
alerts = params.get( 'alerts', '' )
alerts_checked = CheckboxField.is_checked( alerts )
if repository.email_alerts:
- email_alerts = from_json_string( repository.email_alerts )
+ email_alerts = json.from_json_string( repository.email_alerts )
else:
email_alerts = []
repository_dependencies = None
@@ -2434,12 +2440,12 @@
if alerts_checked:
if user.email not in email_alerts:
email_alerts.append( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
flush_needed = True
else:
if user.email in email_alerts:
email_alerts.remove( user.email )
- repository.email_alerts = to_json_string( email_alerts )
+ repository.email_alerts = json.to_json_string( email_alerts )
flush_needed = True
if flush_needed:
trans.sa_session.add( repository )
@@ -2460,7 +2466,7 @@
repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None )
@@ -2527,7 +2533,7 @@
guid = tool_metadata_dict[ 'guid' ]
full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
- can_use_disk_file = common.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
+ can_use_disk_file = suc.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
@@ -2576,7 +2582,26 @@
review_id=review_id,
message=message,
status=status )
-
+ @web.expose
+ def view_workflow( self, trans, workflow_name, repository_metadata_id, **kwd ):
+ """Retrieve necessary information about a workflow from the database so that it can be displayed in an svg image."""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if workflow_name:
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
+ changeset_revision = repository_metadata.changeset_revision
+ metadata = repository_metadata.metadata
+ return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
+ repository=repository,
+ changeset_revision=changeset_revision,
+ repository_metadata_id=repository_metadata_id,
+ workflow_name=workflow_name,
+ metadata=metadata,
+ message=message,
+ status=status )
# ----- Utility methods -----
def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True,
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ /dev/null
@@ -1,411 +0,0 @@
-import pkg_resources
-pkg_resources.require( "simplejson" )
-pkg_resources.require( "SVGFig" )
-import os, logging, ConfigParser, tempfile, shutil, svgfig
-from galaxy.webapps.community import model
-from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.util.json import from_json_string, to_json_string
-from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
-from galaxy.web.base.controller import *
-from galaxy.tools import DefaultToolState
-from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
-import common
-import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed import encoding_util
-
-class RepoInputDataModule( InputDataModule ):
-
- type = "data_input"
- name = "Input dataset"
-
- @classmethod
- def new( Class, trans, tools_metadata=None, tool_id=None ):
- module = Class( trans )
- module.state = dict( name="Input Dataset" )
- return module
- @classmethod
- def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata=None, secure=True ):
- module = Class( trans )
- state = from_json_string( step_dict[ "tool_state" ] )
- module.state = dict( name=state.get( "name", "Input Dataset" ) )
- return module
- @classmethod
- def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
- module = Class( trans )
- module.state = dict( name="Input Dataset" )
- if step.tool_inputs and "name" in step.tool_inputs:
- module.state[ 'name' ] = step.tool_inputs[ 'name' ]
- return module
-
-class RepoToolModule( ToolModule ):
-
- type = "tool"
-
- def __init__( self, trans, repository_id, changeset_revision, tools_metadata, tool_id ):
- self.trans = trans
- self.tools_metadata = tools_metadata
- self.tool_id = tool_id
- self.tool = None
- self.errors = None
- for tool_dict in tools_metadata:
- if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
- repository, self.tool, message = common.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
- if message and self.tool is None:
- self.errors = 'unavailable'
- break
- self.post_job_actions = {}
- self.workflow_outputs = []
- self.state = None
- @classmethod
- def new( Class, trans, repository_id, changeset_revision, tools_metadata, tool_id=None ):
- module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
- module.state = module.tool.new_state( trans, all_pages=True )
- return module
- @classmethod
- def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
- tool_id = step_dict[ 'tool_id' ]
- module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
- module.state = DefaultToolState()
- if module.tool is not None:
- module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
- module.errors = step_dict.get( "tool_errors", None )
- return module
- @classmethod
- def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
- module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
- module.state = DefaultToolState()
- if module.tool:
- module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
- else:
- module.state.inputs = {}
- module.errors = step.tool_errors
- return module
- def get_data_inputs( self ):
- data_inputs = []
- def callback( input, value, prefixed_name, prefixed_label ):
- if isinstance( input, DataToolParameter ):
- data_inputs.append( dict( name=prefixed_name,
- label=prefixed_label,
- extensions=input.extensions ) )
- if self.tool:
- visit_input_values( self.tool.inputs, self.state.inputs, callback )
- return data_inputs
- def get_data_outputs( self ):
- data_outputs = []
- if self.tool:
- data_inputs = None
- for name, tool_output in self.tool.outputs.iteritems():
- if tool_output.format_source != None:
- # Default to special name "input" which remove restrictions on connections
- formats = [ 'input' ]
- if data_inputs == None:
- data_inputs = self.get_data_inputs()
- # Find the input parameter referenced by format_source
- for di in data_inputs:
- # Input names come prefixed with conditional and repeat names separated by '|',
- # so remove prefixes when comparing with format_source.
- if di[ 'name' ] != None and di[ 'name' ].split( '|' )[ -1 ] == tool_output.format_source:
- formats = di[ 'extensions' ]
- else:
- formats = [ tool_output.format ]
- for change_elem in tool_output.change_format:
- for when_elem in change_elem.findall( 'when' ):
- format = when_elem.get( 'format', None )
- if format and format not in formats:
- formats.append( format )
- data_outputs.append( dict( name=name, extensions=formats ) )
- return data_outputs
-
-class RepoWorkflowModuleFactory( WorkflowModuleFactory ):
- def __init__( self, module_types ):
- self.module_types = module_types
- def new( self, trans, type, tools_metadata=None, tool_id=None ):
- """Return module for type and (optional) tool_id initialized with new / default state."""
- assert type in self.module_types
- return self.module_types[type].new( trans, tool_id )
- def from_dict( self, trans, repository_id, changeset_revision, step_dict, **kwd ):
- """Return module initialized from the data in dictionary `step_dict`."""
- type = step_dict[ 'type' ]
- assert type in self.module_types
- return self.module_types[ type ].from_dict( trans, repository_id, changeset_revision, step_dict, **kwd )
- def from_workflow_step( self, trans, repository_id, changeset_revision, tools_metadata, step ):
- """Return module initialized from the WorkflowStep object `step`."""
- type = step.type
- return self.module_types[ type ].from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
-
-module_factory = RepoWorkflowModuleFactory( dict( data_input=RepoInputDataModule, tool=RepoToolModule ) )
-
-class WorkflowController( BaseUIController ):
- @web.expose
- def view_workflow( self, trans, **kwd ):
- repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
- workflow_name = kwd.get( 'workflow_name', '' )
- if workflow_name:
- workflow_name = encoding_util.tool_shed_decode( workflow_name )
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
- repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
- return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
- repository=repository,
- changeset_revision=repository_metadata.changeset_revision,
- repository_metadata_id=repository_metadata_id,
- workflow_name=workflow_name,
- metadata=repository_metadata.metadata,
- message=message,
- status=status )
- @web.expose
- def generate_workflow_image( self, trans, repository_metadata_id, workflow_name ):
- repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
- repository_id = trans.security.encode_id( repository_metadata.repository_id )
- changeset_revision = repository_metadata.changeset_revision
- metadata = repository_metadata.metadata
- workflow_name = encoding_util.tool_shed_decode( workflow_name )
- # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- for workflow_tup in metadata[ 'workflows' ]:
- workflow_dict = workflow_tup[1]
- if workflow_dict[ 'name' ] == workflow_name:
- break
- if 'tools' in metadata:
- tools_metadata = metadata[ 'tools' ]
- else:
- tools_metadata = []
- workflow, missing_tool_tups = self.__workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision )
- data = []
- canvas = svgfig.canvas( style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left" )
- text = svgfig.SVG( "g" )
- connectors = svgfig.SVG( "g" )
- boxes = svgfig.SVG( "g" )
- svgfig.Text.defaults[ "font-size" ] = "10px"
- in_pos = {}
- out_pos = {}
- margin = 5
- # Spacing between input/outputs.
- line_px = 16
- # Store px width for boxes of each step.
- widths = {}
- max_width, max_x, max_y = 0, 0, 0
- for step in workflow.steps:
- step.upgrade_messages = {}
- module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
- tool_errors = module.type == 'tool' and not module.tool
- module_data_inputs = self.__get_data_inputs( step, module )
- module_data_outputs = self.__get_data_outputs( step, module, workflow.steps )
- step_dict = {
- 'id' : step.order_index,
- 'data_inputs' : module_data_inputs,
- 'data_outputs' : module_data_outputs,
- 'position' : step.position,
- 'tool_errors' : tool_errors
- }
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict[ 'input_connections' ] = input_conn_dict
- data.append( step_dict )
- x, y = step.position[ 'left' ], step.position[ 'top' ]
- count = 0
- module_name = self.__get_name( module, missing_tool_tups )
- max_len = len( module_name ) * 1.5
- text.append( svgfig.Text( x, y + 20, module_name, **{ "font-size": "14px" } ).SVG() )
- y += 45
- for di in module_data_inputs:
- cur_y = y + count * line_px
- if step.order_index not in in_pos:
- in_pos[ step.order_index ] = {}
- in_pos[ step.order_index ][ di[ 'name' ] ] = ( x, cur_y )
- text.append( svgfig.Text( x, cur_y, di[ 'label' ] ).SVG() )
- count += 1
- max_len = max( max_len, len( di[ 'label' ] ) )
- if len( module.get_data_inputs() ) > 0:
- y += 15
- for do in module_data_outputs:
- cur_y = y + count * line_px
- if step.order_index not in out_pos:
- out_pos[ step.order_index ] = {}
- out_pos[ step.order_index ][ do[ 'name' ] ] = ( x, cur_y )
- text.append( svgfig.Text( x, cur_y, do[ 'name' ] ).SVG() )
- count += 1
- max_len = max( max_len, len( do['name' ] ) )
- widths[ step.order_index ] = max_len * 5.5
- max_x = max( max_x, step.position[ 'left' ] )
- max_y = max( max_y, step.position[ 'top' ] )
- max_width = max( max_width, widths[ step.order_index ] )
- for step_dict in data:
- tool_unavailable = step_dict[ 'tool_errors' ]
- width = widths[ step_dict[ 'id' ] ]
- x, y = step_dict[ 'position' ][ 'left' ], step_dict[ 'position' ][ 'top' ]
- if tool_unavailable:
- fill = "#EBBCB2"
- else:
- fill = "#EBD9B2"
- boxes.append( svgfig.Rect( x - margin, y, x + width - margin, y + 30, fill=fill ).SVG() )
- box_height = ( len( step_dict[ 'data_inputs' ] ) + len( step_dict[ 'data_outputs' ] ) ) * line_px + margin
- # Draw separator line.
- if len( step_dict[ 'data_inputs' ] ) > 0:
- box_height += 15
- sep_y = y + len( step_dict[ 'data_inputs' ] ) * line_px + 40
- text.append( svgfig.Line( x - margin, sep_y, x + width - margin, sep_y ).SVG() )
- # Define an input/output box.
- boxes.append( svgfig.Rect( x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff" ).SVG() )
- for conn, output_dict in step_dict[ 'input_connections' ].iteritems():
- in_coords = in_pos[ step_dict[ 'id' ] ][ conn ]
- # out_pos_index will be a step number like 1, 2, 3...
- out_pos_index = output_dict[ 'id' ]
- # out_pos_name will be a string like 'o', 'o2', etc.
- out_pos_name = output_dict[ 'output_name' ]
- if out_pos_index in out_pos:
- # out_conn_index_dict will be something like:
- # 7: {'o': (824.5, 618)}
- out_conn_index_dict = out_pos[ out_pos_index ]
- if out_pos_name in out_conn_index_dict:
- out_conn_pos = out_pos[ out_pos_index ][ out_pos_name ]
- else:
- # Take any key / value pair available in out_conn_index_dict.
- # A problem will result if the dictionary is empty.
- if out_conn_index_dict.keys():
- key = out_conn_index_dict.keys()[0]
- out_conn_pos = out_pos[ out_pos_index ][ key ]
- adjusted = ( out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ], out_conn_pos[ 1 ] )
- text.append( svgfig.SVG( "circle",
- cx=out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ] - margin,
- cy=out_conn_pos[ 1 ] - margin,
- r = 5,
- fill="#ffffff" ) )
- connectors.append( svgfig.Line( adjusted[ 0 ],
- adjusted[ 1 ] - margin,
- in_coords[ 0 ] - 10,
- in_coords[ 1 ],
- arrow_end = "true" ).SVG() )
- canvas.append( connectors )
- canvas.append( boxes )
- canvas.append( text )
- width, height = ( max_x + max_width + 50 ), max_y + 300
- canvas[ 'width' ] = "%s px" % width
- canvas[ 'height' ] = "%s px" % height
- canvas[ 'viewBox' ] = "0 0 %s %s" % ( width, height )
- trans.response.set_content_type( "image/svg+xml" )
- return canvas.standalone_xml()
- def __get_name( self, module, missing_tool_tups ):
- module_name = module.get_name()
- if module.type == 'tool' and module_name == 'unavailable':
- for missing_tool_tup in missing_tool_tups:
- missing_tool_id, missing_tool_name, missing_tool_version = missing_tool_tup
- if missing_tool_id == module.tool_id:
- module_name = '%s' % missing_tool_name
- return module_name
- def __get_data_inputs( self, step, module ):
- if module.type == 'tool':
- if module.tool:
- return module.get_data_inputs()
- else:
- data_inputs = []
- for wfsc in step.input_connections:
- data_inputs_dict = {}
- data_inputs_dict[ 'extensions' ] = [ '' ]
- data_inputs_dict[ 'name' ] = wfsc.input_name
- data_inputs_dict[ 'label' ] = 'Unknown'
- data_inputs.append( data_inputs_dict )
- return data_inputs
- return module.get_data_inputs()
- def __get_data_outputs( self, step, module, steps ):
- if module.type == 'tool':
- if module.tool:
- return module.get_data_outputs()
- else:
- data_outputs = []
- data_outputs_dict = {}
- data_outputs_dict[ 'extensions' ] = [ 'input' ]
- found = False
- for workflow_step in steps:
- for wfsc in workflow_step.input_connections:
- if step.name == wfsc.output_step.name:
- data_outputs_dict[ 'name' ] = wfsc.output_name
- found = True
- break
- if found:
- break
- if not found:
- # We're at the last step of the workflow.
- data_outputs_dict[ 'name' ] = 'output'
- data_outputs.append( data_outputs_dict )
- return data_outputs
- return module.get_data_outputs()
- def __workflow_from_dict( self, trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
- """Creates and returns workflow object from a dictionary."""
- trans.workflow_building_mode = True
- workflow = model.Workflow()
- workflow.name = workflow_dict[ 'name' ]
- workflow.has_errors = False
- steps = []
- # Keep ids for each step that we need to use to make connections.
- steps_by_external_id = {}
- # Keep track of tools required by the workflow that are not available in
- # the tool shed repository. Each tuple in the list of missing_tool_tups
- # will be ( tool_id, tool_name, tool_version ).
- missing_tool_tups = []
- # First pass to build step objects and populate basic values
- for key, step_dict in workflow_dict[ 'steps' ].iteritems():
- # Create the model class for the step
- step = model.WorkflowStep()
- step.name = step_dict[ 'name' ]
- step.position = step_dict[ 'position' ]
- module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata, secure=False )
- if module.type == 'tool' and module.tool is None:
- # A required tool is not available in the current repository.
- step.tool_errors = 'unavailable'
- missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
- if missing_tool_tup not in missing_tool_tups:
- missing_tool_tups.append( missing_tool_tup )
- module.save_to_step( step )
- if step.tool_errors:
- workflow.has_errors = True
- # Stick this in the step temporarily.
- step.temp_input_connections = step_dict[ 'input_connections' ]
- steps.append( step )
- steps_by_external_id[ step_dict[ 'id' ] ] = step
- # Second pass to deal with connections between steps.
- for step in steps:
- # Input connections.
- for input_name, conn_dict in step.temp_input_connections.iteritems():
- if conn_dict:
- output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
- conn = model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
- conn.output_step = output_step
- conn.output_name = conn_dict[ 'output_name' ]
- step.input_connections.append( conn )
- del step.temp_input_connections
- # Order the steps if possible.
- attach_ordered_steps( workflow, steps )
- return workflow, missing_tool_tups
- @web.expose
- def import_workflow( self, trans, **kwd ):
- repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
- workflow_name = kwd.get( 'workflow_name', '' )
- if workflow_name:
- workflow_name = encoding_util.tool_shed_decode( workflow_name )
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- workflows = repository_metadata.metadata[ 'workflows' ]
- workflow_data = None
- for workflow_data in workflows:
- if workflow_data[ 'name' ] == workflow_name:
- break
- if workflow_data:
- if kwd.get( 'open_for_url', False ):
- tmp_fd, tmp_fname = tempfile.mkstemp()
- to_file = open( tmp_fname, 'wb' )
- to_file.write( to_json_string( workflow_data ) )
- return open( tmp_fname )
- galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
- url = '%sworkflow/import_workflow?tool_shed_url=%s&repository_metadata_id=%s&workflow_name=%s' % \
- ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ) )
- return trans.response.send_redirect( url )
- return trans.response.send_redirect( web.url_for( controller='workflow',
- action='view_workflow',
- message=message,
- status=status ) )
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -56,11 +56,12 @@
class InvalidTool( object ):
"""Invalid tool object"""
- def __init__( self, id=None, tool_config=None, repository_id=None, changeset_revision=None ):
+ def __init__( self, id=None, tool_config=None, repository_id=None, changeset_revision=None, repository_installation_status=None ):
self.id = id
self.tool_config = tool_config
self.repository_id = repository_id
self.changeset_revision = changeset_revision
+ self.repository_installation_status = repository_installation_status
class ReadMe( object ):
"""Readme text object"""
@@ -86,7 +87,7 @@
class Tool( object ):
"""Tool object"""
def __init__( self, id=None, tool_config=None, tool_id=None, name=None, description=None, version=None, requirements=None,
- repository_id=None, changeset_revision=None ):
+ repository_id=None, changeset_revision=None, repository_installation_status=None ):
self.id = id
self.tool_config = tool_config
self.tool_id = tool_id
@@ -96,6 +97,7 @@
self.requirements = requirements
self.repository_id = repository_id
self.changeset_revision = changeset_revision
+ self.repository_installation_status = repository_installation_status
class ToolDependency( object ):
"""Tool dependency object"""
@@ -112,14 +114,17 @@
self.tool_dependency_id = tool_dependency_id
class Workflow( object ):
- """Workflow object"""
- def __init__( self, id=None, repository_metadata_id=None, workflow_name=None, steps=None, format_version=None, annotation=None ):
+ """Workflow object."""
+ def __init__( self, id=None, workflow_name=None, steps=None, format_version=None, annotation=None, repository_metadata_id=None, repository_id=None ):
+ # When rendered in the tool shed, repository_metadata_id will have a value and repository_id will be None. When rendered in Galaxy, repository_id
+ # will have a value and repository_metadata_id will be None.
self.id = id
- self.repository_metadata_id = repository_metadata_id
self.workflow_name = workflow_name
self.steps = steps
self.format_version = format_version
self.annotation = annotation
+ self.repository_metadata_id = repository_metadata_id
+ self.repository_id = repository_id
def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
"""Return a folder hierarchy containing datatypes."""
@@ -163,12 +168,18 @@
invalid_tool_id += 1
if repository:
repository_id = repository.id
+ if trans.webapp.name == 'galaxy':
+ repository_installation_status = repository.status
+ else:
+ repository_installation_status = None
else:
repository_id = None
+ repository_installation_status = None
invalid_tool = InvalidTool( id=invalid_tool_id,
tool_config=invalid_tool_config,
repository_id=repository_id,
- changeset_revision=changeset_revision )
+ changeset_revision=changeset_revision,
+ repository_installation_status=repository_installation_status )
folder.invalid_tools.append( invalid_tool )
else:
invalid_tools_root_folder = None
@@ -249,8 +260,13 @@
folder.valid_tools.append( tool )
if repository:
repository_id = repository.id
+ if trans.webapp.name == 'galaxy':
+ repository_installation_status = repository.status
+ else:
+ repository_installation_status = None
else:
- repository_id = ''
+ repository_id = None
+ repository_installation_status = None
for tool_dict in tool_dicts:
tool_id += 1
if 'requirements' in tool_dict:
@@ -269,7 +285,8 @@
version=tool_dict[ 'version' ],
requirements=requirements_str,
repository_id=repository_id,
- changeset_revision=changeset_revision )
+ changeset_revision=changeset_revision,
+ repository_installation_status=repository_installation_status )
folder.valid_tools.append( tool )
else:
tools_root_folder = None
@@ -351,8 +368,13 @@
else:
tool_dependencies_root_folder = None
return folder_id, tool_dependencies_root_folder
-def build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' ):
- """Return a folder hierarchy containing invalid tools."""
+def build_workflows_folder( trans, folder_id, workflows, repository_metadata_id=None, repository_id=None, label='Workflows' ):
+ """
+ Return a folder hierarchy containing workflow objects for each workflow dictionary in the received workflows list. When
+ this method is called from the tool shed, repository_metadata_id will have a value and repository_id will be None. When
+ this method is called from Galaxy, repository_id will have a value only if the repository is not currenlty being installed
+ and repository_metadata_id will be None.
+ """
if workflows:
workflow_id = 0
folder_id += 1
@@ -363,11 +385,12 @@
# Insert a header row.
workflow_id += 1
workflow = Workflow( id=workflow_id,
- repository_metadata_id=None,
workflow_name='Name',
steps='steps',
format_version='format-version',
- annotation='annotation' )
+ annotation='annotation',
+ repository_metadata_id=repository_metadata_id,
+ repository_id=repository_id )
folder.workflows.append( workflow )
for workflow_tup in workflows:
workflow_dict=workflow_tup[ 1 ]
@@ -378,11 +401,12 @@
steps = 'unknown'
workflow_id += 1
workflow = Workflow( id=workflow_id,
- repository_metadata_id=repository_metadata.id,
workflow_name=workflow_dict[ 'name' ],
steps=steps,
format_version=workflow_dict[ 'format-version' ],
- annotation=workflow_dict[ 'annotation' ] )
+ annotation=workflow_dict[ 'annotation' ],
+ repository_metadata_id=repository_metadata_id,
+ repository_id=repository_id )
folder.workflows.append( workflow )
else:
workflows_root_folder = None
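The updated docstring for build_workflows_folder earlier in this file's diff spells out the new contract: the tool shed passes a repository_metadata_id, while Galaxy passes a repository_id. A minimal sketch consolidating the two call shapes (a hypothetical wrapper, not part of this changeset; it simply mirrors the updated calls in lib/galaxy/util/shed_util_common.py above, with trans, folder_id and workflows supplied by the caller):

from galaxy.webapps.community.util import container_util

def add_workflows_container( trans, folder_id, workflows, repository_metadata=None, repository_id=None ):
    # Hypothetical wrapper for illustration only.
    if repository_metadata is not None:
        # Tool shed rendering: a RepositoryMetadata record is available.
        return container_util.build_workflows_folder( trans=trans,
                                                      folder_id=folder_id,
                                                      workflows=workflows,
                                                      repository_metadata_id=repository_metadata.id,
                                                      repository_id=None,
                                                      label='Workflows' )
    # Galaxy rendering: only the installed repository's id is available.
    return container_util.build_workflows_folder( trans=trans,
                                                  folder_id=folder_id,
                                                  workflows=workflows,
                                                  repository_metadata_id=None,
                                                  repository_id=repository_id,
                                                  label='Workflows' )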
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/community/util/workflow_util.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/util/workflow_util.py
@@ -0,0 +1,384 @@
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "SVGFig" )
+
+import logging, svgfig
+from galaxy.util import json
+import galaxy.util.shed_util_common as suc
+from galaxy.tool_shed import encoding_util
+from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
+import galaxy.webapps.galaxy.controllers.workflow
+import galaxy.tools
+import galaxy.tools.parameters
+
+log = logging.getLogger( __name__ )
+
+class RepoInputDataModule( InputDataModule ):
+
+ type = "data_input"
+ name = "Input dataset"
+
+ @classmethod
+ def new( Class, trans, tools_metadata=None, tool_id=None ):
+ module = Class( trans )
+ module.state = dict( name="Input Dataset" )
+ return module
+ @classmethod
+ def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata=None, secure=True ):
+ module = Class( trans )
+ state = json.from_json_string( step_dict[ "tool_state" ] )
+ module.state = dict( name=state.get( "name", "Input Dataset" ) )
+ return module
+ @classmethod
+ def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
+ module = Class( trans )
+ module.state = dict( name="Input Dataset" )
+ if step.tool_inputs and "name" in step.tool_inputs:
+ module.state[ 'name' ] = step.tool_inputs[ 'name' ]
+ return module
+
+class RepoToolModule( ToolModule ):
+
+ type = "tool"
+
+ def __init__( self, trans, repository_id, changeset_revision, tools_metadata, tool_id ):
+ self.trans = trans
+ self.tools_metadata = tools_metadata
+ self.tool_id = tool_id
+ self.tool = None
+ self.errors = None
+ for tool_dict in tools_metadata:
+ if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
+ repository, self.tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+ if message and self.tool is None:
+ self.errors = 'unavailable'
+ break
+ else:
+ # We're in Galaxy.
+ self.tool = trans.app.toolbox.tools_by_id.get( self.tool_id, None )
+ if self.tool is None:
+ self.errors = 'unavailable'
+ self.post_job_actions = {}
+ self.workflow_outputs = []
+ self.state = None
+ @classmethod
+ def new( Class, trans, repository_id, changeset_revision, tools_metadata, tool_id=None ):
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
+ module.state = module.tool.new_state( trans, all_pages=True )
+ return module
+ @classmethod
+ def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
+ tool_id = step_dict[ 'tool_id' ]
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
+ module.state = galaxy.tools.DefaultToolState()
+ if module.tool is not None:
+ module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
+ module.errors = step_dict.get( "tool_errors", None )
+ return module
+ @classmethod
+ def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
+ module.state = galaxy.tools.DefaultToolState()
+ if module.tool:
+ module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
+ else:
+ module.state.inputs = {}
+ module.errors = step.tool_errors
+ return module
+ def get_data_inputs( self ):
+ data_inputs = []
+ def callback( input, value, prefixed_name, prefixed_label ):
+ if isinstance( input, galaxy.tools.parameters.DataToolParameter ):
+ data_inputs.append( dict( name=prefixed_name,
+ label=prefixed_label,
+ extensions=input.extensions ) )
+ if self.tool:
+ galaxy.tools.parameters.visit_input_values( self.tool.inputs, self.state.inputs, callback )
+ return data_inputs
+ def get_data_outputs( self ):
+ data_outputs = []
+ if self.tool:
+ data_inputs = None
+ for name, tool_output in self.tool.outputs.iteritems():
+ if tool_output.format_source != None:
+ # Default to special name "input" which removes restrictions on connections
+ formats = [ 'input' ]
+ if data_inputs == None:
+ data_inputs = self.get_data_inputs()
+ # Find the input parameter referenced by format_source
+ for di in data_inputs:
+ # Input names come prefixed with conditional and repeat names separated by '|',
+ # so remove prefixes when comparing with format_source.
+ if di[ 'name' ] != None and di[ 'name' ].split( '|' )[ -1 ] == tool_output.format_source:
+ formats = di[ 'extensions' ]
+ else:
+ formats = [ tool_output.format ]
+ for change_elem in tool_output.change_format:
+ for when_elem in change_elem.findall( 'when' ):
+ format = when_elem.get( 'format', None )
+ if format and format not in formats:
+ formats.append( format )
+ data_outputs.append( dict( name=name, extensions=formats ) )
+ return data_outputs
+
+class RepoWorkflowModuleFactory( WorkflowModuleFactory ):
+ def __init__( self, module_types ):
+ self.module_types = module_types
+ def new( self, trans, type, tools_metadata=None, tool_id=None ):
+ """Return module for type and (optional) tool_id initialized with new / default state."""
+ assert type in self.module_types
+ return self.module_types[type].new( trans, tool_id )
+ def from_dict( self, trans, repository_id, changeset_revision, step_dict, **kwd ):
+ """Return module initialized from the data in dictionary `step_dict`."""
+ type = step_dict[ 'type' ]
+ assert type in self.module_types
+ return self.module_types[ type ].from_dict( trans, repository_id, changeset_revision, step_dict, **kwd )
+ def from_workflow_step( self, trans, repository_id, changeset_revision, tools_metadata, step ):
+ """Return module initialized from the WorkflowStep object `step`."""
+ type = step.type
+ return self.module_types[ type ].from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
+
+module_factory = RepoWorkflowModuleFactory( dict( data_input=RepoInputDataModule, tool=RepoToolModule ) )
+
+def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
+ """
+ Return an svg image representation of a workflow dictionary created when the workflow was exported. This method is called
+ from both Galaxy and the tool shed. When called from the tool shed, repository_metadata_id will have a value and repository_id
+ will be None. When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
+ """
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_id = trans.security.encode_id( repository_metadata.repository_id )
+ changeset_revision = repository_metadata.changeset_revision
+ metadata = repository_metadata.metadata
+ else:
+ # We're in Galaxy.
+ repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
+ changeset_revision = repository.changeset_revision
+ metadata = repository.metadata
+ # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+ # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ for workflow_tup in metadata[ 'workflows' ]:
+ workflow_dict = workflow_tup[1]
+ if workflow_dict[ 'name' ] == workflow_name:
+ break
+ if 'tools' in metadata:
+ tools_metadata = metadata[ 'tools' ]
+ else:
+ tools_metadata = []
+ workflow, missing_tool_tups = get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision )
+ data = []
+ canvas = svgfig.canvas( style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left" )
+ text = svgfig.SVG( "g" )
+ connectors = svgfig.SVG( "g" )
+ boxes = svgfig.SVG( "g" )
+ svgfig.Text.defaults[ "font-size" ] = "10px"
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ # Spacing between input/outputs.
+ line_px = 16
+ # Store px width for boxes of each step.
+ widths = {}
+ max_width, max_x, max_y = 0, 0, 0
+ for step in workflow.steps:
+ step.upgrade_messages = {}
+ module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
+ tool_errors = module.type == 'tool' and not module.tool
+ module_data_inputs = get_workflow_data_inputs( step, module )
+ module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
+ step_dict = {
+ 'id' : step.order_index,
+ 'data_inputs' : module_data_inputs,
+ 'data_outputs' : module_data_outputs,
+ 'position' : step.position,
+ 'tool_errors' : tool_errors
+ }
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict[ 'input_connections' ] = input_conn_dict
+ data.append( step_dict )
+ x, y = step.position[ 'left' ], step.position[ 'top' ]
+ count = 0
+ module_name = get_workflow_module_name( module, missing_tool_tups )
+ max_len = len( module_name ) * 1.5
+ text.append( svgfig.Text( x, y + 20, module_name, **{ "font-size": "14px" } ).SVG() )
+ y += 45
+ for di in module_data_inputs:
+ cur_y = y + count * line_px
+ if step.order_index not in in_pos:
+ in_pos[ step.order_index ] = {}
+ in_pos[ step.order_index ][ di[ 'name' ] ] = ( x, cur_y )
+ text.append( svgfig.Text( x, cur_y, di[ 'label' ] ).SVG() )
+ count += 1
+ max_len = max( max_len, len( di[ 'label' ] ) )
+ if len( module.get_data_inputs() ) > 0:
+ y += 15
+ for do in module_data_outputs:
+ cur_y = y + count * line_px
+ if step.order_index not in out_pos:
+ out_pos[ step.order_index ] = {}
+ out_pos[ step.order_index ][ do[ 'name' ] ] = ( x, cur_y )
+ text.append( svgfig.Text( x, cur_y, do[ 'name' ] ).SVG() )
+ count += 1
+ max_len = max( max_len, len( do['name' ] ) )
+ widths[ step.order_index ] = max_len * 5.5
+ max_x = max( max_x, step.position[ 'left' ] )
+ max_y = max( max_y, step.position[ 'top' ] )
+ max_width = max( max_width, widths[ step.order_index ] )
+ for step_dict in data:
+ tool_unavailable = step_dict[ 'tool_errors' ]
+ width = widths[ step_dict[ 'id' ] ]
+ x, y = step_dict[ 'position' ][ 'left' ], step_dict[ 'position' ][ 'top' ]
+ if tool_unavailable:
+ fill = "#EBBCB2"
+ else:
+ fill = "#EBD9B2"
+ boxes.append( svgfig.Rect( x - margin, y, x + width - margin, y + 30, fill=fill ).SVG() )
+ box_height = ( len( step_dict[ 'data_inputs' ] ) + len( step_dict[ 'data_outputs' ] ) ) * line_px + margin
+ # Draw separator line.
+ if len( step_dict[ 'data_inputs' ] ) > 0:
+ box_height += 15
+ sep_y = y + len( step_dict[ 'data_inputs' ] ) * line_px + 40
+ text.append( svgfig.Line( x - margin, sep_y, x + width - margin, sep_y ).SVG() )
+ # Define an input/output box.
+ boxes.append( svgfig.Rect( x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff" ).SVG() )
+ for conn, output_dict in step_dict[ 'input_connections' ].iteritems():
+ in_coords = in_pos[ step_dict[ 'id' ] ][ conn ]
+ # out_pos_index will be a step number like 1, 2, 3...
+ out_pos_index = output_dict[ 'id' ]
+ # out_pos_name will be a string like 'o', 'o2', etc.
+ out_pos_name = output_dict[ 'output_name' ]
+ if out_pos_index in out_pos:
+ # out_conn_index_dict will be something like:
+ # 7: {'o': (824.5, 618)}
+ out_conn_index_dict = out_pos[ out_pos_index ]
+ if out_pos_name in out_conn_index_dict:
+ out_conn_pos = out_pos[ out_pos_index ][ out_pos_name ]
+ else:
+ # Take any key / value pair available in out_conn_index_dict.
+ # A problem will result if the dictionary is empty.
+ if out_conn_index_dict.keys():
+ key = out_conn_index_dict.keys()[0]
+ out_conn_pos = out_pos[ out_pos_index ][ key ]
+ adjusted = ( out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ], out_conn_pos[ 1 ] )
+ text.append( svgfig.SVG( "circle",
+ cx=out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ] - margin,
+ cy=out_conn_pos[ 1 ] - margin,
+ r = 5,
+ fill="#ffffff" ) )
+ connectors.append( svgfig.Line( adjusted[ 0 ],
+ adjusted[ 1 ] - margin,
+ in_coords[ 0 ] - 10,
+ in_coords[ 1 ],
+ arrow_end = "true" ).SVG() )
+ canvas.append( connectors )
+ canvas.append( boxes )
+ canvas.append( text )
+ width, height = ( max_x + max_width + 50 ), max_y + 300
+ canvas[ 'width' ] = "%s px" % width
+ canvas[ 'height' ] = "%s px" % height
+ canvas[ 'viewBox' ] = "0 0 %s %s" % ( width, height )
+ trans.response.set_content_type( "image/svg+xml" )
+ return canvas.standalone_xml()
+def get_workflow_data_inputs( step, module ):
+ if module.type == 'tool':
+ if module.tool:
+ return module.get_data_inputs()
+ else:
+ data_inputs = []
+ for wfsc in step.input_connections:
+ data_inputs_dict = {}
+ data_inputs_dict[ 'extensions' ] = [ '' ]
+ data_inputs_dict[ 'name' ] = wfsc.input_name
+ data_inputs_dict[ 'label' ] = 'Unknown'
+ data_inputs.append( data_inputs_dict )
+ return data_inputs
+ return module.get_data_inputs()
+def get_workflow_data_outputs( step, module, steps ):
+ if module.type == 'tool':
+ if module.tool:
+ return module.get_data_outputs()
+ else:
+ data_outputs = []
+ data_outputs_dict = {}
+ data_outputs_dict[ 'extensions' ] = [ 'input' ]
+ found = False
+ for workflow_step in steps:
+ for wfsc in workflow_step.input_connections:
+ if step.name == wfsc.output_step.name:
+ data_outputs_dict[ 'name' ] = wfsc.output_name
+ found = True
+ break
+ if found:
+ break
+ if not found:
+ # We're at the last step of the workflow.
+ data_outputs_dict[ 'name' ] = 'output'
+ data_outputs.append( data_outputs_dict )
+ return data_outputs
+ return module.get_data_outputs()
+def get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
+ """Return a workflow object from the dictionary object created when it was exported."""
+ trans.workflow_building_mode = True
+ workflow = trans.model.Workflow()
+ workflow.name = workflow_dict[ 'name' ]
+ workflow.has_errors = False
+ steps = []
+ # Keep ids for each step that we need to use to make connections.
+ steps_by_external_id = {}
+ # Keep track of tools required by the workflow that are not available in
+ # the tool shed repository. Each tuple in the list of missing_tool_tups
+ # will be ( tool_id, tool_name, tool_version ).
+ missing_tool_tups = []
+ # First pass to build step objects and populate basic values
+ for key, step_dict in workflow_dict[ 'steps' ].iteritems():
+ # Create the model class for the step
+ step = trans.model.WorkflowStep()
+ step.name = step_dict[ 'name' ]
+ step.position = step_dict[ 'position' ]
+ module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata, secure=False )
+ if module.type == 'tool' and module.tool is None:
+ # A required tool is not available in the current repository.
+ step.tool_errors = 'unavailable'
+ missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
+ if missing_tool_tup not in missing_tool_tups:
+ missing_tool_tups.append( missing_tool_tup )
+ module.save_to_step( step )
+ if step.tool_errors:
+ workflow.has_errors = True
+ # Stick this in the step temporarily.
+ step.temp_input_connections = step_dict[ 'input_connections' ]
+ steps.append( step )
+ steps_by_external_id[ step_dict[ 'id' ] ] = step
+ # Second pass to deal with connections between steps.
+ for step in steps:
+ # Input connections.
+ for input_name, conn_dict in step.temp_input_connections.iteritems():
+ if conn_dict:
+ output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
+ conn = trans.model.WorkflowStepConnection()
+ conn.input_step = step
+ conn.input_name = input_name
+ conn.output_step = output_step
+ conn.output_name = conn_dict[ 'output_name' ]
+ step.input_connections.append( conn )
+ del step.temp_input_connections
+ # Order the steps if possible.
+ galaxy.webapps.galaxy.controllers.workflow.attach_ordered_steps( workflow, steps )
+ return workflow, missing_tool_tups
+def get_workflow_module_name( module, missing_tool_tups ):
+ module_name = module.get_name()
+ if module.type == 'tool' and module_name == 'unavailable':
+ for missing_tool_tup in missing_tool_tups:
+ missing_tool_id, missing_tool_name, missing_tool_version = missing_tool_tup
+ if missing_tool_id == module.tool_id:
+ module_name = '%s' % missing_tool_name
+ break
+ return module_name
\ No newline at end of file
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1,10 +1,10 @@
import urllib2, tempfile
from admin import *
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import json
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.tool_shed import encoding_util
-from galaxy.webapps.community.util import container_util
+from galaxy.webapps.community.util import container_util, workflow_util
from galaxy import eggs, tools
eggs.require( 'mercurial' )
@@ -552,6 +552,11 @@
galaxy_url = url_for( '/', qualified=True )
url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
+ @web.expose
+ @web.require_admin
+ def generate_workflow_image( self, trans, workflow_name, repository_id=None ):
+ """Return an svg image representation of a workflow dictionary created when the workflow was exported."""
+ return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=repository_id )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -576,7 +581,7 @@
raw_text = response.read()
response.close()
if len( raw_text ) > 2:
- encoded_text = from_json_string( raw_text )
+ encoded_text = json.from_json_string( raw_text )
text = encoding_util.tool_shed_decode( encoded_text )
else:
text = ''
@@ -586,6 +591,71 @@
return tool_version.get_version_ids( app, reverse=True )
@web.expose
@web.require_admin
+ def import_workflow( self, trans, workflow_name, repository_id, **kwd ):
+ # FIXME: importing doesn't yet work...
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if workflow_name:
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
+ repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
+ changeset_revision = repository.changeset_revision
+ metadata = repository.metadata
+ workflows = metadata[ 'workflows' ]
+ tools_metadata = metadata[ 'tools' ]
+ workflow_dict = None
+ for workflow_data_tuple in workflows:
+ # The value of workflow_data_tuple is ( relative_path_to_workflow_file, exported_workflow_dict ).
+ relative_path_to_workflow_file, exported_workflow_dict = workflow_data_tuple
+ if exported_workflow_dict[ 'name' ] == workflow_name:
+ # If the exported workflow is available on disk, import it.
+ if os.path.exists( relative_path_to_workflow_file ):
+ workflow_file = open( relative_path_to_workflow_file, 'rb' )
+ workflow_data = workflow_file.read()
+ workflow_file.close()
+ workflow_dict = json.from_json_string( workflow_data )
+ else:
+ # Use the current exported_workflow_dict.
+ workflow_dict = exported_workflow_dict
+ break
+ if workflow_dict:
+ # Create workflow if possible. If a required tool is not available in the local
+ # Galaxy instance, the tool information will be available in the step_dict.
+ src = None
+ workflow, missing_tool_tups = workflow_util.get_workflow_from_dict( trans,
+ workflow_dict,
+ tools_metadata,
+ repository_id,
+ changeset_revision )
+ if workflow_name:
+ workflow.name = workflow_name
+ # Provide user feedback and show workflow list.
+ if workflow.has_errors:
+ message += "Imported, but some steps in this workflow have validation errors. "
+ status = "error"
+ if workflow.has_cycles:
+ message += "Imported, but this workflow contains cycles. "
+ status = "error"
+ else:
+ message += "Workflow <b>%s</b> imported successfully. " % workflow.name
+ if missing_tool_tups:
+ # TODO: rework this since it is used in the tool shed, but shouldn't be used in Galaxy.
+ name_and_id_str = ''
+ for missing_tool_tup in missing_tool_tups:
+ tool_id, tool_name, other = missing_tool_tup
+ name_and_id_str += 'name: %s, id: %s' % ( str( tool_name ), str( tool_id ) )
+ log.debug( "The following tools required by this workflow are missing from this Galaxy instance: %s" % name_and_id_str )
+ else:
+ message += 'The workflow named %s is not included in the metadata for revision %s of repository %s' % \
+ ( str( workflow_name ), str( changeset_revision ), str( repository.name ) )
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
+ @web.expose
+ @web.require_admin
def initiate_repository_installation( self, trans, shed_repository_ids, encoded_kwd, reinstalling=False ):
tsr_ids = util.listify( shed_repository_ids )
tool_shed_repositories = []
@@ -728,7 +798,7 @@
text = response.read()
response.close()
if text:
- tool_version_dicts = from_json_string( text )
+ tool_version_dicts = json.from_json_string( text )
shed_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
@@ -1084,7 +1154,7 @@
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
- repo_information_dict = from_json_string( raw_text )
+ repo_information_dict = json.from_json_string( raw_text )
includes_tools = util.string_as_bool( repo_information_dict.get( 'includes_tools', False ) )
includes_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_repository_dependencies', False ) )
includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
@@ -1191,7 +1261,7 @@
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
- readme_files_dict = from_json_string( raw_text )
+ readme_files_dict = json.from_json_string( raw_text )
# Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
# are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
@@ -1593,7 +1663,7 @@
text = response.read()
response.close()
if text:
- tool_version_dicts = from_json_string( text )
+ tool_version_dicts = json.from_json_string( text )
shed_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
@@ -1785,7 +1855,26 @@
tool_lineage=tool_lineage,
message=message,
status=status )
-
+ @web.expose
+ @web.require_admin
+ def view_workflow( self, trans, workflow_name=None, repository_id=None, **kwd ):
+ """Retrieve necessary information about a workflow from the database so that it can be displayed in an svg image."""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if workflow_name:
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
+ repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
+ changeset_revision = repository.changeset_revision
+ metadata = repository.metadata
+ return trans.fill_template( "/admin/tool_shed_repository/view_workflow.mako",
+ repository=repository,
+ changeset_revision=changeset_revision,
+ repository_id=repository_id,
+ workflow_name=workflow_name,
+ metadata=metadata,
+ message=message,
+ status=status )
## ---- Utility methods -------------------------------------------------------
def build_shed_tool_conf_select_field( trans ):
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -2,7 +2,7 @@
from elementtree.ElementTree import Element
from galaxy import web
from galaxy.tools.parameters import DataToolParameter, DummyDataset, RuntimeValue, check_param, visit_input_values
-from galaxy.tools import DefaultToolState
+import galaxy.tools
from galaxy.util.bunch import Bunch
from galaxy.util.json import from_json_string, to_json_string
from galaxy.jobs.actions.post import ActionBox
@@ -139,7 +139,7 @@
return dict( input=DataToolParameter( None, Element( "param", name="input", label=label, multiple=True, type="data", format=', '.join(filter_set) ), self.trans ) )
def get_runtime_state( self ):
- state = DefaultToolState()
+ state = galaxy.tools.DefaultToolState()
state.inputs = dict( input=None )
return state
@@ -149,7 +149,7 @@
def decode_runtime_state( self, trans, string ):
fake_tool = Bunch( inputs = self.get_runtime_inputs() )
- state = DefaultToolState()
+ state = galaxy.tools.DefaultToolState()
state.decode( string, fake_tool, trans.app )
return state
@@ -192,7 +192,7 @@
def from_dict( Class, trans, d, secure=True ):
tool_id = d[ 'tool_id' ]
module = Class( trans, tool_id )
- module.state = DefaultToolState()
+ module.state = galaxy.tools.DefaultToolState()
if module.tool is not None:
module.state.decode( d[ "tool_state" ], module.tool, module.trans.app, secure=secure )
module.errors = d.get( "tool_errors", None )
@@ -213,7 +213,7 @@
tool_id = tool.id
if ( trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id ):
module = Class( trans, tool_id )
- module.state = DefaultToolState()
+ module.state = galaxy.tools.DefaultToolState()
module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
module.errors = step.tool_errors
# module.post_job_actions = step.post_job_actions
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 templates/admin/tool_shed_repository/view_workflow.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/view_workflow.mako
@@ -0,0 +1,45 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/community/common/common.mako" import="*" />
+<%namespace file="/webapps/community/repository/common.mako" import="*" />
+
+<%
+ from galaxy.web.framework.helpers import time_ago
+ from galaxy.tool_shed.encoding_util import tool_shed_encode
+%>
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/community/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="render_workflow( workflow_name, repository_id )">
+ <% center_url = h.url_for( controller='admin_toolshed', action='generate_workflow_image', workflow_name=tool_shed_encode( workflow_name ), repository_id=repository_id ) %>
+ <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"></iframe>
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ <li><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='import_workflow', workflow_name=tool_shed_encode( workflow_name ), repository_id=repository_id )}">Import workflow to Galaxy</a></li>
+ </div>
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolFormTitle">${workflow_name | h}</div>
+<div class="form-row">
+ <div class="toolParamHelp" style="clear: both;">
+ (this page displays SVG graphics)
+ </div>
+</div>
+<br clear="left"/>
+
+${render_workflow( workflow_name, repository_id )}
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -231,6 +231,10 @@
folder_label = "%s<i> - this repository's tools require handling of these dependencies</i>" % folder_label
col_span_str = 'colspan="4"'
elif folder.workflows:
+ if folder.description:
+ folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+ else:
+ folder_label = "%s<i> - click the name to view an SVG image of the workflow</i>" % folder_label
col_span_str = 'colspan="4"'
%><td ${col_span_str} style="padding-left: ${folder_pad}px;">
@@ -439,7 +443,11 @@
<a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a></div>
%else:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">${tool.name | h}</a>
+ %if tool.repository_installation_status == trans.model.ToolShedRepository.installation_status.INSTALLED:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">${tool.name | h}</a>
+ %else:
+ ${tool.name | h}
+ %endif
%endif
%else:
${tool.name | h}
@@ -515,6 +523,13 @@
<%
from galaxy.tool_shed.encoding_util import tool_shed_encode
encoded_id = trans.security.encode_id( workflow.id )
+ encoded_workflow_name = tool_shed_encode( workflow.workflow_name )
+ if trans.webapp.name == 'community':
+ encoded_repository_metadata_id = trans.security.encode_id( workflow.repository_metadata_id )
+ encoded_repository_id = None
+ else:
+ encoded_repository_metadata_id = None
+ encoded_repository_id = trans.security.encode_id( workflow.repository_id )
if row_is_header:
cell_type = 'th'
else:
@@ -528,8 +543,12 @@
<${cell_type} style="padding-left: ${pad+20}px;">
%if row_is_header:
${workflow.workflow_name | h}
+ %elif trans.webapp.name == 'community' and encoded_repository_metadata_id:
+ <a href="${h.url_for( controller='repository', action='view_workflow', workflow_name=encoded_workflow_name, repository_metadata_id=encoded_repository_metadata_id )}">${workflow.workflow_name | h}</a>
+ %elif trans.webapp.name == 'galaxy' and encoded_repository_id:
+ <a href="${h.url_for( controller='admin_toolshed', action='view_workflow', workflow_name=encoded_workflow_name, repository_id=encoded_repository_id )}">${workflow.workflow_name | h}</a>
%else:
- <a href="${h.url_for( controller='workflow', action='view_workflow', repository_metadata_id=trans.security.encode_id( workflow.repository_metadata_id ), workflow_name=tool_shed_encode( workflow.workflow_name ) )}">${workflow.workflow_name | h}</a>
+ ${workflow.workflow_name | h}
%endif
</${cell_type}><${cell_type}>${workflow.steps | h}</${cell_type}>
@@ -557,7 +576,7 @@
missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
- valid_tools_root_folder = containers_dict.get( 'valid_tools', none )
+ valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
workflows_root_folder = containers_dict.get( 'workflows', None )
has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 templates/webapps/community/repository/view_workflow.mako
--- a/templates/webapps/community/repository/view_workflow.mako
+++ b/templates/webapps/community/repository/view_workflow.mako
@@ -7,22 +7,23 @@
from galaxy.web.framework.helpers import time_ago
from galaxy.tool_shed.encoding_util import tool_shed_encode
- in_tool_shed = trans.webapp.name == 'community'
is_admin = trans.user_is_admin()
is_new = repository.is_new( trans.app )
can_manage = is_admin or trans.user == repository.user
- can_contact_owner = in_tool_shed and trans.user and trans.user != repository.user
- can_push = in_tool_shed and trans.app.security_agent.can_push( trans.app, trans.user, repository )
+ can_contact_owner = trans.user and trans.user != repository.user
+ can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
can_upload = can_push
- can_download = in_tool_shed and not is_new and ( not is_malicious or can_push )
- can_browse_contents = in_tool_shed and not is_new
- can_rate = in_tool_shed and not is_new and trans.user and repository.user != trans.user
- can_view_change_log = in_tool_shed and not is_new
+ can_download = not is_new and ( not is_malicious or can_push )
+ can_browse_contents = not is_new
+ can_rate = not is_new and trans.user and repository.user != trans.user
+ can_view_change_log = not is_new
if can_push:
browse_label = 'Browse or delete repository tip files'
else:
browse_label = 'Browse repository tip files'
- has_readme = metadata and 'readme' in metadata
+ has_readme = metadata and 'readme_files' in metadata
+
+ # <li><a class="action-button" href="${h.url_for( controller='repository', action='install_repositories_by_revision', repository_ids=trans.security.encode_id( repository.id ), changeset_revisions=changeset_revision )}">Install repository to Galaxy</a></li>
%><%!
@@ -34,57 +35,43 @@
%><%inherit file="${inherit(context)}"/>
-<%def name="render_workflow( repository_metadata_id, workflow_name )">
- <% center_url = h.url_for( controller='workflow', action='generate_workflow_image', repository_metadata_id=repository_metadata_id, workflow_name=tool_shed_encode( workflow_name ) ) %>
+<%def name="render_workflow( workflow_name, repository_metadata_id )">
+ <% center_url = h.url_for( controller='repository', action='generate_workflow_image', workflow_name=tool_shed_encode( workflow_name ), repository_metadata_id=repository_metadata_id, repository_id=None ) %>
<iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"></iframe>
</%def>
<br/><br/>
<ul class="manage-table-actions">
- %if in_tool_shed:
- %if is_new and can_upload:
- <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
- %else:
- <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
- <div popupmenu="repository-${repository.id}-popup">
- %if can_upload:
- <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
- %endif
- %if can_manage:
- <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">Manage repository</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">View repository</a>
- %endif
- %if can_view_change_log:
- <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
- %endif
- %if can_rate:
- <a class="action-button" href="${h.url_for( controller='repository', action='rate_repository', id=trans.app.security.encode_id( repository.id ) )}">Rate repository</a>
- %endif
- %if can_browse_contents:
- <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
- %endif
- %if can_contact_owner:
- <a class="action-button" href="${h.url_for( controller='repository', action='contact_owner', id=trans.security.encode_id( repository.id ) )}">Contact repository owner</a>
- %endif
- %if can_download:
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='gz' )}">Download as a .tar.gz file</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='bz2' )}">Download as a .tar.bz2 file</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='zip' )}">Download as a zip file</a>
- %endif
- </div>
- %endif
+ %if is_new and can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
%else:
<li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="repository-${repository.id}-popup">
- <li><a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', repository_metadata_id=repository_metadata_id, workflow_name=tool_shed_encode( workflow_name ) )}">Import workflow to local Galaxy</a></li>
- <li><a class="action-button" href="${h.url_for( controller='repository', action='install_repositories_by_revision', repository_ids=trans.security.encode_id( repository.id ), changeset_revisions=changeset_revision )}">Install repository to local Galaxy</a></li>
- </div>
- <li><a class="action-button" id="toolshed-${repository.id}-popup" class="menubutton">Tool Shed Actions</a></li>
- <div popupmenu="toolshed-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='repository', action='browse_valid_categories' )}">Browse valid repositories</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='find_tools' )}">Search for valid tools</a>
- <a class="action-button" href="${h.url_for( controller='repository', action='find_workflows' )}">Search for workflows</a>
+ %if can_upload:
+ <a class="action-button" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
+ %endif
+ %if can_manage:
+ <a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">Manage repository</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">View repository</a>
+ %endif
+ %if can_view_change_log:
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+ %endif
+ %if can_rate:
+ <a class="action-button" href="${h.url_for( controller='repository', action='rate_repository', id=trans.app.security.encode_id( repository.id ) )}">Rate repository</a>
+ %endif
+ %if can_browse_contents:
+ <a class="action-button" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label}</a>
+ %endif
+ %if can_contact_owner:
+ <a class="action-button" href="${h.url_for( controller='repository', action='contact_owner', id=trans.security.encode_id( repository.id ) )}">Contact repository owner</a>
+ %endif
+ %if can_download:
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='gz' )}">Download as a .tar.gz file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='bz2' )}">Download as a .tar.bz2 file</a>
+ <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='zip' )}">Download as a zip file</a>
+ %endif
</div>
%endif
</ul>
@@ -102,4 +89,4 @@
</div><br clear="left"/>
-${render_workflow( repository_metadata_id, workflow_name )}
+${render_workflow( workflow_name, repository_metadata_id )}
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -454,9 +454,11 @@
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def load_workflow_image( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
+ def load_workflow_image_in_tool_shed( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
+ # FIXME: Can not always assume the first repository_metadata record is the correct one.
+ # TODO: Add a method for displaying a workflow image in Galaxy.
metadata = self.get_repository_metadata( repository )
- url = '/workflow/generate_workflow_image?repository_metadata_id=%s&workflow_name=%s' % \
+ url = '/repository/generate_workflow_image?repository_metadata_id=%s&workflow_name=%s' % \
( self.security.encode_id( metadata[0].id ), tool_shed_encode( workflow_name ) )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
diff -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 -r 7073d786cad0f0d251d4ea9b2c42171f698cf953 test/tool_shed/functional/test_0060_workflows.py
--- a/test/tool_shed/functional/test_0060_workflows.py
+++ b/test/tool_shed/functional/test_0060_workflows.py
@@ -46,7 +46,7 @@
workflow_filename,
filepath=workflow_filepath,
commit_message='Uploaded filtering workflow.' )
- self.load_workflow_image( repository, workflow_name, strings_displayed=[ '#EBBCB2' ] )
+ self.load_workflow_image_in_tool_shed( repository, workflow_name, strings_displayed=[ '#EBBCB2' ] )
def test_0020_upload_tool( self ):
'''Upload the missing tool for the workflow in the previous step, and verify that the error is no longer present.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -54,7 +54,7 @@
'filtering/filtering_2.2.0.tar',
commit_message="Uploaded filtering 2.2.0",
remove_repo_files_not_in_tar='No' )
- self.load_workflow_image( repository, workflow_name, strings_not_displayed=[ '#EBBCB2' ] )
+ self.load_workflow_image_in_tool_shed( repository, workflow_name, strings_not_displayed=[ '#EBBCB2' ] )
def test_0025_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ab827b2c7907/
changeset: ab827b2c7907
user: jmchilton
date: 2012-12-26 19:42:31
summary: Implement a new job runner superclass, ClusterJobRunner, intended to reduce the amount of code duplicated between the drmaa, pbs, and lwr job runners (the cli and condor runners could likely benefit from it as well). This superclass manages the monitor and worker threads and their queues.
I am submitting only the LWR changes that use this class, but I would encourage the Galaxy team to refactor the drmaa and pbs runners to use it as well (or I would be happy to make those changes if given access or a promise that they will be accepted quickly).
A variant of the drmaa runner that has been refactored to use this class can be found here: https://bitbucket.org/jmchilton/galaxy-central-lwr-enhancement-1/src/tip/li… from the now defunct pull request 80.
---
lib/galaxy/jobs/runners/__init__.py | 160 +++++++++++++++++++++++++++++++++++
1 file changed, 160 insertions(+)
affected #: 1 file
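As a rough sketch of the extension point this superclass provides (illustrative only; ExampleJobRunner and its method bodies are assumptions, not part of this changeset, though the method names match the stubs added in the diff below):

from galaxy.jobs.runners import ClusterJobRunner, ClusterJobState

class ExampleJobRunner( ClusterJobRunner ):
    runner_name = "ExampleRunner"

    def __init__( self, app ):
        super( ExampleJobRunner, self ).__init__( app )
        self._init_monitor_thread()   # start the shared monitor loop
        self._init_worker_threads()   # start the shared worker pool

    def queue_job( self, job_wrapper ):
        # Submit the job to the external resource, then hand a state object
        # to the shared monitor thread.
        job_state = ClusterJobState()
        job_state.job_wrapper = job_wrapper
        job_state.job_id = job_wrapper.job_id
        job_state.runner_url = job_wrapper.get_job_runner_url()
        self.monitor_job( job_state )

    def check_watched_item( self, job_state ):
        # Return the state to keep watching the job, or None (after calling
        # mark_as_finished / mark_as_failed) to drop it from the watch list.
        return job_state

    def finish_job( self, job_state ):
        job_state.job_wrapper.finish( '', '' )

    def fail_job( self, job_state ):
        job_state.job_wrapper.fail( "job failed" )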
diff -r 4f4875265599424fed16f35dd82eb785167f6c25 -r ab827b2c790750c5fd9b9d815d54aeba8100008a lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -1,5 +1,10 @@
import os, logging, os.path
+from galaxy import model
+from Queue import Queue, Empty
+import time
+import threading
+
log = logging.getLogger( __name__ )
class BaseJobRunner( object ):
@@ -90,3 +95,158 @@
set_extension = False,
kwds = { 'overwrite' : False } )
return commands
+
+class ClusterJobState( object ):
+ """
+ Encapsulate the state of a cluster job; this should be subclassed as
+ needed for various job runners to capture additional information needed
+ to communicate with the cluster job manager.
+ """
+
+ def __init__( self ):
+ self.job_wrapper = None
+ self.job_id = None
+ self.old_state = None
+ self.running = False
+ self.runner_url = None
+
+STOP_SIGNAL = object()
+
+JOB_STATUS_QUEUED = 'queue'
+JOB_STATUS_FAILED = 'fail'
+JOB_STATUS_FINISHED = 'finish'
+
+class ClusterJobRunner( BaseJobRunner ):
+ """
+ Not sure this is the best name for this class, but there is common code
+ shared between sge, pbs, drmaa, etc...
+ """
+
+ def __init__( self, app ):
+ self.app = app
+ self.sa_session = app.model.context
+ # 'watched' and 'monitor_queue' are both used to keep track of jobs to watch.
+ # 'monitor_queue' is used to add new watched jobs, and can be called from
+ # any thread (usually via the 'monitor_job' method). 'watched' must only
+ # be modified by the monitor thread, which will move items from 'monitor_queue'
+ # to 'watched' and then manage the watched jobs.
+ self.watched = []
+ self.monitor_queue = Queue()
+
+ def _init_monitor_thread(self):
+ self.monitor_thread = threading.Thread( name="%s.monitor_thread" % self.runner_name, target=self.monitor )
+ self.monitor_thread.setDaemon( True )
+ self.monitor_thread.start()
+
+ def _init_worker_threads(self):
+ self.work_queue = Queue()
+ self.work_threads = []
+ nworkers = self.app.config.cluster_job_queue_workers
+ for i in range( nworkers ):
+ worker = threading.Thread( name="%s.work_thread-%d" % (self.runner_name, i), target=self.run_next )
+ worker.start()
+ self.work_threads.append( worker )
+
+ def handle_stop(self):
+ # DRMAA and SGE runners should override this and disconnect.
+ pass
+
+ def monitor( self ):
+ """
+ Watches jobs currently in the cluster queue and deals with state changes
+ (queued to running) and job completion
+ """
+ while 1:
+ # Take any new watched jobs and put them on the monitor list
+ try:
+ while 1:
+ cluster_job_state = self.monitor_queue.get_nowait()
+ if cluster_job_state is STOP_SIGNAL:
+ # TODO: This is where any cleanup would occur
+ self.handle_stop()
+ return
+ self.watched.append( cluster_job_state )
+ except Empty:
+ pass
+ # Iterate over the list of watched jobs and check state
+ self.check_watched_items()
+ # Sleep a bit before the next state check
+ time.sleep( 1 )
+
+ def run_next( self ):
+ """
+ Run the next item in the queue (a job waiting to run or finish)
+ """
+ while 1:
+ ( op, obj ) = self.work_queue.get()
+ if op is STOP_SIGNAL:
+ return
+ try:
+ if op == JOB_STATUS_QUEUED:
+ # If the next item is to be run, then only run it if the
+ # job state is "queued". Otherwise the next item was either
+ # cancelled or one of its siblings encountered an error.
+ job_state = obj.get_state()
+ if model.Job.states.QUEUED == job_state:
+ self.queue_job( obj )
+ else:
+ log.debug( "Not executing job %d in state %s" % ( obj.get_id_tag(), job_state ) )
+ elif op == JOB_STATUS_FINISHED:
+ self.finish_job( obj )
+ elif op == JOB_STATUS_FAILED:
+ self.fail_job( obj )
+ except:
+ log.exception( "Uncaught exception %sing job" % op )
+
+ def monitor_job(self, job_state):
+ self.monitor_queue.put( job_state )
+
+ def put( self, job_wrapper ):
+ """Add a job to the queue (by job identifier)"""
+ # Change to queued state before handing to worker thread so the runner won't pick it up again
+ job_wrapper.change_state( model.Job.states.QUEUED )
+ self.mark_as_queued(job_wrapper)
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the monitor thread"""
+ log.info( "sending stop signal to worker threads" )
+ self.monitor_queue.put( STOP_SIGNAL )
+ for i in range( len( self.work_threads ) ):
+ self.work_queue.put( ( STOP_SIGNAL, None ) )
+
+ def check_watched_items(self):
+ """
+ This method is responsible for iterating over self.watched and handling
+ state changes and updating self.watched with a new list of watched job
+ states. Subclasses can opt to override this directly (as older job runners will
+ initially) or just override check_watched_item and allow the list processing to
+ reuse the logic here.
+ """
+ new_watched = []
+ for cluster_job_state in self.watched:
+ new_cluster_job_state = self.check_watched_item(cluster_job_state)
+ if new_cluster_job_state:
+ new_watched.append(new_cluster_job_state)
+ self.watched = new_watched
+
+ # Subclasses should implement this unless they override check_watched_items altogether.
+ def check_watched_item(self, job_state):
+ raise NotImplementedError()
+
+ def queue_job(self, job_wrapper):
+ raise NotImplementedError()
+
+ def finish_job(self, job_state):
+ raise NotImplementedError()
+
+ def fail_job(self, job_state):
+ raise NotImplementedError()
+
+ def mark_as_finished(self, job_state):
+ self.work_queue.put( ( JOB_STATUS_FINISHED, job_state ) )
+
+ def mark_as_failed(self, job_state):
+ self.work_queue.put( ( JOB_STATUS_FAILED, job_state ) )
+
+ def mark_as_queued(self, job_wrapper):
+ self.work_queue.put( ( JOB_STATUS_QUEUED, job_wrapper ) )
https://bitbucket.org/galaxy/galaxy-central/changeset/8c6cf637058a/
changeset: 8c6cf637058a
user: jmchilton
date: 2012-12-26 19:42:31
summary: Refactor the LWRJobRunner to be a ClusterJobRunner and implement a recover method for this runner; the upshot is that LWR jobs can now survive Galaxy restarts. The downside is that jobs are no longer queued on the Galaxy server, so the LWR server should be updated (to changeset 5213f6d or newer) to queue jobs on the remote server. This is not mandatory, however; things will still work, it is just that more jobs may run simultaneously than desired.
---
lib/galaxy/jobs/runners/lwr.py | 211 ++++++++++++++++++++++++----------------
1 file changed, 128 insertions(+), 83 deletions(-)
affected #: 1 file
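In outline, the restart-survival behaviour works as sketched below (a condensed restatement of the recover method added at the end of this diff, not new functionality):

# Sketch of the recover path; see the full diff below for the actual code.
def recover( self, job, job_wrapper ):
    job_state = ClusterJobState()
    job_state.job_id = str( job.get_job_runner_external_id() )
    job_state.runner_url = job_wrapper.get_job_runner_url()
    job_state.job_wrapper = job_wrapper
    if job.get_state() == model.Job.states.RUNNING:
        # The remote LWR server still owns the job; just start watching it again.
        self.monitor_queue.put( job_state )
    elif job.get_state() == model.Job.states.QUEUED:
        # The LWR did not queue jobs at this point, so a QUEUED job means staging
        # was interrupted by the restart and the job cannot be resumed.
        job_wrapper.fail( "This job was killed when Galaxy was restarted. Please retry the job." )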
diff -r ab827b2c790750c5fd9b9d815d54aeba8100008a -r 8c6cf637058a1f14137b5f9caa9af29e077b29ce lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -7,7 +7,7 @@
from galaxy import model
from galaxy.datatypes.data import nice_size
-from galaxy.jobs.runners import BaseJobRunner
+from galaxy.jobs.runners import ClusterJobState, ClusterJobRunner
import os, errno
from time import sleep
@@ -199,12 +199,18 @@
def wait(self):
""" """
while True:
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
- complete = check_complete_response["complete"] == "true"
+ complete = self.check_complete()
if complete:
return check_complete_response
time.sleep(1)
+ def raw_check_complete(self):
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ return check_complete_response
+
+ def check_complete(self):
+ return self.raw_check_complete()["complete"] == "true"
+
def clean(self):
self.__raw_execute("clean", { "job_id" : self.job_id })
@@ -213,51 +219,34 @@
-class LwrJobRunner( BaseJobRunner ):
+class LwrJobRunner( ClusterJobRunner ):
"""
- Lwr Job Runner
+ LWR Job Runner
"""
- STOP_SIGNAL = object()
+ runner_name = "LWRRunner"
+
def __init__( self, app ):
- """Start the job runner with 'nworkers' worker threads"""
- self.app = app
- self.sa_session = app.model.context
+ """Start the job runner """
+ super( LwrJobRunner, self ).__init__( app )
+ self._init_monitor_thread()
+ log.info( "starting LWR workers" )
+ self._init_worker_threads()
- # start workers
- self.queue = Queue()
- self.threads = []
- nworkers = app.config.local_job_queue_workers
- log.info( "starting workers" )
- for i in range( nworkers ):
- worker = threading.Thread( ( name="LwrJobRunner.thread-%d" % i ), target=self.run_next )
- worker.setDaemon( True )
- worker.start()
- self.threads.append( worker )
- log.debug( "%d workers ready", nworkers )
+ def check_watched_item(self, job_state):
+ try:
+ client = self.get_client_from_state(job_state)
+ complete = client.check_complete()
+ except Exception:
+ # An orphaned job was put into the queue at app startup, or the remote server went down;
+ # either way, we are done with it.
+ self.mark_as_finished(job_state)
+ return None
+ if complete:
+ self.mark_as_finished(job_state)
+ return None
+ return job_state
- def run_next( self ):
- """Run the next job, waiting until one is available if neccesary"""
- while 1:
- job_wrapper = self.queue.get()
- if job_wrapper is self.STOP_SIGNAL:
- return
- try:
- self.run_job( job_wrapper )
- except:
- log.exception( "Uncaught exception running job" )
-
- def determine_lwr_url(self, url):
- lwr_url = url[ len( 'lwr://' ) : ]
- return lwr_url
-
- def get_client_from_wrapper(self, job_wrapper):
- return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
-
- def get_client(self, job_runner, job_id):
- lwr_url = self.determine_lwr_url( job_runner )
- return Client(lwr_url, job_id)
-
- def run_job( self, job_wrapper ):
+ def queue_job(self, job_wrapper):
stderr = stdout = command_line = ''
runner_url = job_wrapper.get_job_runner_url()
@@ -277,35 +266,76 @@
return
# If we were able to get a command line, run the job
- if command_line:
- try:
- #log.debug( 'executing: %s' % command_line )
- client = self.get_client_from_wrapper(job_wrapper)
- output_fnames = job_wrapper.get_output_fnames()
- output_files = [ str( o ) for o in output_fnames ]
- input_files = job_wrapper.get_input_fnames()
- file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
- rebuilt_command_line = file_stager.get_rewritten_command_line()
- client.launch( rebuilt_command_line )
+ if not command_line:
+ job_wrapper.finish( '', '' )
+ return
- job_wrapper.set_runner( runner_url, job_wrapper.job_id )
- job_wrapper.change_state( model.Job.states.RUNNING )
+ try:
+ #log.debug( 'executing: %s' % command_line )
+ client = self.get_client_from_wrapper(job_wrapper)
+ output_files = self.get_output_files(job_wrapper)
+ input_files = job_wrapper.get_input_fnames()
+ file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
+ rebuilt_command_line = file_stager.get_rewritten_command_line()
+ client.launch( rebuilt_command_line )
+ job_wrapper.set_runner( runner_url, job_wrapper.job_id )
+ job_wrapper.change_state( model.Job.states.RUNNING )
- run_results = client.wait()
- log.debug('run_results %s' % run_results )
- stdout = run_results['stdout']
- stderr = run_results['stderr']
+ except Exception, exc:
+ job_wrapper.fail( "failure running job", exception=True )
+ log.exception("failure running job %d" % job_wrapper.job_id)
+ return
-
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
- for output_file in output_files:
- client.download_output(output_file)
- client.clean()
- log.debug('execution finished: %s' % command_line)
- except Exception, exc:
- job_wrapper.fail( "failure running job", exception=True )
- log.exception("failure running job %d" % job_wrapper.job_id)
- return
+ lwr_job_state = ClusterJobState()
+ lwr_job_state.job_wrapper = job_wrapper
+ lwr_job_state.job_id = job_wrapper.job_id
+ lwr_job_state.old_state = True
+ lwr_job_state.running = True
+ lwr_job_state.runner_url = runner_url
+ self.monitor_job(lwr_job_state)
+
+ def get_output_files(self, job_wrapper):
+ output_fnames = job_wrapper.get_output_fnames()
+ return [ str( o ) for o in output_fnames ]
+
+
+ def determine_lwr_url(self, url):
+ lwr_url = url[ len( 'lwr://' ) : ]
+ return lwr_url
+
+ def get_client_from_wrapper(self, job_wrapper):
+ return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
+
+ def get_client_from_state(self, job_state):
+ job_runner = job_state.runner_url
+ job_id = job_state.job_id
+ return self.get_client(job_runner, job_id)
+
+ def get_client(self, job_runner, job_id):
+ lwr_url = self.determine_lwr_url( job_runner )
+ return Client(lwr_url, job_id)
+
+ def finish_job( self, job_state ):
+ stderr = stdout = command_line = ''
+ job_wrapper = job_state.job_wrapper
+ try:
+ client = self.get_client_from_state(job_state)
+
+ run_results = client.raw_check_complete()
+ log.debug('run_results %s' % run_results )
+ stdout = run_results['stdout']
+ stderr = run_results['stderr']
+
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
+ output_files = self.get_output_files(job_wrapper)
+ for output_file in output_files:
+ client.download_output(output_file)
+ client.clean()
+ log.debug('execution finished: %s' % command_line)
+ except Exception, exc:
+ job_wrapper.fail( "failure running job", exception=True )
+ log.exception("failure running job %d" % job_wrapper.job_id)
+ return
#run the metadata setting script here
#this is terminate-able when output dataset/job is deleted
#so that long running set_meta()s can be canceled without having to reboot the server
@@ -321,7 +351,7 @@
job_wrapper.external_output_metadata.set_job_runner_external_pid( external_metadata_proc.pid, self.sa_session )
external_metadata_proc.wait()
log.debug( 'execution of external set_meta finished for job %d' % job_wrapper.job_id )
-
+
# Finish the job
try:
job_wrapper.finish( stdout, stderr )
@@ -329,12 +359,13 @@
log.exception("Job wrapper finish method failed")
job_wrapper.fail("Unable to finish job", exception=True)
- def put( self, job_wrapper ):
- """Add a job to the queue (by job identifier)"""
- # Change to queued state before handing to worker thread so the runner won't pick it up again
- job_wrapper.change_state( model.Job.states.QUEUED )
- self.queue.put( job_wrapper )
-
+ def fail_job( self, job_state ):
+ """
+        Separated out so we can use the worker threads for it.
+ """
+ self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )
+ job_state.job_wrapper.fail( job_state.fail_message )
+
def shutdown( self ):
"""Attempts to gracefully shut down the worker threads"""
log.info( "sending stop signal to worker threads" )
@@ -383,7 +414,21 @@
log.debug("Attempt remote lwr kill of job with url %s and id %s" % (lwr_url, job_id))
client = self.get_client(lwr_url, job_id)
client.kill()
+
+
def recover( self, job, job_wrapper ):
- # local jobs can't be recovered
- job_wrapper.change_state( model.Job.states.ERROR, info = "This job was killed when Galaxy was restarted. Please retry the job." )
-
+ """Recovers jobs stuck in the queued/running state when Galaxy started"""
+ job_state = ClusterJobState()
+ job_state.job_id = str( job.get_job_runner_external_id() )
+ job_state.runner_url = job_wrapper.get_job_runner_url()
+ job_wrapper.command_line = job.get_command_line()
+ job_state.job_wrapper = job_wrapper
+ if job.get_state() == model.Job.states.RUNNING:
+ log.debug( "(LWR/%s) is still in running state, adding to the LWR queue" % ( job.get_id()) )
+ job_state.old_state = True
+ job_state.running = True
+ self.monitor_queue.put( job_state )
+ elif job.get_state() == model.Job.states.QUEUED:
+ # LWR doesn't queue currently, so this indicates galaxy was shutoff while
+ # job was being staged. Not sure how to recover from that.
+ job_state.job_wrapper.fail( "This job was killed when Galaxy was restarted. Please retry the job." )
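For context on the asynchronous pattern this changeset moves the LWR runner to: queue_job() stages files and launches the remote job, a monitor thread polls the LWR server, and finish_job() downloads outputs once the server reports completion. The sketch below illustrates only the polling half; it is a minimal illustration under those assumptions, not the runner's actual monitor implementation, and the JobState stand-in and poll_interval parameter are made up for this example.

import time

class JobState(object):
    # Minimal stand-in for ClusterJobState: only the fields the sketch needs.
    def __init__(self, job_id, runner_url):
        self.job_id = job_id
        self.runner_url = runner_url

def monitor(client, job_state, finish_job, poll_interval=1):
    # Poll the remote LWR server until the job reports completion, then hand
    # the state off to finish_job() to download outputs and clean up.
    while True:
        if client.check_complete():
            finish_job(job_state)
            return
        time.sleep(poll_interval)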
https://bitbucket.org/galaxy/galaxy-central/changeset/e6676636cfe7/
changeset: e6676636cfe7
user: jmchilton
date: 2012-12-26 19:42:31
summary: Allow execution of jobs created by task splitting via the LWR job runner.
---
lib/galaxy/jobs/runners/lwr.py | 62 +++++++++++++++++++++++++++++++---------
1 file changed, 49 insertions(+), 13 deletions(-)
affected #: 1 file
diff -r 8c6cf637058a1f14137b5f9caa9af29e077b29ce -r e6676636cfe79de273602f7c68b2174f74a0d2d5 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -27,13 +27,14 @@
class FileStager(object):
- def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir):
+ def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
self.client = client
self.command_line = command_line
self.config_files = config_files
self.input_files = input_files
self.output_files = output_files
self.tool_dir = os.path.abspath(tool_dir)
+ self.working_directory = working_directory
self.file_renames = {}
@@ -46,7 +47,9 @@
self.__initialize_referenced_tool_files()
self.__upload_tool_files()
self.__upload_input_files()
+ self.__upload_working_directory_files()
self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
self.__initialize_config_file_renames()
self.__rewrite_and_upload_config_files()
self.__rewrite_command_line()
@@ -69,13 +72,27 @@
for input_file in self.input_files:
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
-
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be uploaded
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
def __initialize_output_file_renames(self):
for output_file in self.output_files:
self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
self.remote_path_separator,
os.path.basename(output_file))
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
def __initialize_config_file_renames(self):
for config_file in self.config_files:
self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
@@ -172,13 +189,27 @@
def upload_config_file(self, path, contents):
return self.__upload_contents("upload_config_file", path, contents)
-
- def download_output(self, path):
+
+ def upload_working_directory_file(self, path):
+ return self.__upload_file("upload_working_directory_file", path)
+
+ def _get_output_type(self, name):
+ return self.__raw_execute_and_parse('get_output_type', {'name': name,
+ 'job_id': self.job_id})
+
+ def download_output(self, path, working_directory):
""" """
name = os.path.basename(path)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id})
- output = open(path, 'wb')
+ output_type = self._get_output_type(name)
+ response = self.__raw_execute('download_output', {'name' : name,
+ "job_id" : self.job_id,
+ 'output_type': output_type})
+ if output_type == 'direct':
+ output = open(path, 'wb')
+ elif output_type == 'task':
+ output = open(os.path.join(working_directory, name), 'wb')
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
try:
while True:
buffer = response.read(1024)
@@ -254,7 +285,7 @@
try:
job_wrapper.prepare()
if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- for cmd in job_wrapper.prepare_input_file_cmds: # run the commands to stage the input files
+ for cmd in job_wrapper.prepare_input_files_cmds: # run the commands to stage the input files
#log.debug( 'executing: %s' % cmd )
if 0 != os.system(cmd):
raise Exception('Error running file staging command: %s' % cmd)
@@ -275,7 +306,8 @@
client = self.get_client_from_wrapper(job_wrapper)
output_files = self.get_output_files(job_wrapper)
input_files = job_wrapper.get_input_fnames()
- file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
+ working_directory = job_wrapper.working_directory
+ file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir, working_directory)
rebuilt_command_line = file_stager.get_rewritten_command_line()
client.launch( rebuilt_command_line )
job_wrapper.set_runner( runner_url, job_wrapper.job_id )
@@ -304,7 +336,10 @@
return lwr_url
def get_client_from_wrapper(self, job_wrapper):
- return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
+ job_id = job_wrapper.job_id
+ if hasattr(job_wrapper, 'task_id'):
+ job_id = "%s_%s" % (job_id, job_wrapper.task_id)
+ return self.get_client( job_wrapper.get_job_runner_url(), job_id )
def get_client_from_state(self, job_state):
job_runner = job_state.runner_url
@@ -329,7 +364,7 @@
if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
output_files = self.get_output_files(job_wrapper)
for output_file in output_files:
- client.download_output(output_file)
+ client.download_output(output_file, working_directory=job_wrapper.working_directory)
client.clean()
log.debug('execution finished: %s' % command_line)
except Exception, exc:
@@ -386,8 +421,9 @@
def stop_job( self, job ):
#if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
- if job.external_output_metadata:
- pid = job.external_output_metadata[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
+ job_ext_output_metadata = job.get_external_output_metadata()
+ if job_ext_output_metadata:
+ pid = job_ext_output_metadata[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
if pid in [ None, '' ]:
log.warning( "stop_job(): %s: no PID in database for job, unable to stop" % job.id )
return
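The download logic added in this changeset branches on the output_type reported by the LWR server: 'direct' outputs are written to the dataset's real path, while 'task' outputs produced by task splitting land in the job's working directory. A self-contained sketch of that dispatch follows; local_target_path is a hypothetical helper name and the example paths are made up.

import os

def local_target_path(path, working_directory, output_type):
    # Mirrors the branching in Client.download_output(): choose where a
    # remote output should be written locally based on its reported type.
    name = os.path.basename(path)
    if output_type == "direct":
        return path
    elif output_type == "task":
        return os.path.join(working_directory, name)
    raise Exception("No remote output found for dataset with path %s" % path)

# A task-split output is redirected into the job's working directory.
print(local_target_path("/galaxy/files/dataset_42.dat", "/galaxy/jobs/000/42", "task"))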
https://bitbucket.org/galaxy/galaxy-central/changeset/e4adc9ad0bb7/
changeset: e4adc9ad0bb7
user: jmchilton
date: 2012-12-26 19:42:31
summary: Extend LWR job runner to stage an input's extra_files_path (if present).
---
lib/galaxy/jobs/runners/lwr.py | 22 ++++++++++++++++++----
1 file changed, 18 insertions(+), 4 deletions(-)
affected #: 1 file
diff -r e6676636cfe79de273602f7c68b2174f74a0d2d5 -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -72,6 +72,16 @@
for input_file in self.input_files:
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
# Task manager stages files into working directory, these need to be uploaded
@@ -167,17 +177,18 @@
response = self.__raw_execute(command, args, data)
return simplejson.loads(response.read())
- def __upload_file(self, action, path, contents = None):
+ def __upload_file(self, action, path, name=None, contents = None):
""" """
input = open(path, 'rb')
try:
mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input)
+ return self.__upload_contents(action, path, mmapped_input, name)
finally:
input.close()
- def __upload_contents(self, action, path, contents):
- name = os.path.basename(path)
+ def __upload_contents(self, action, path, contents, name=None):
+ if not name:
+ name = os.path.basename(path)
args = {"job_id" : self.job_id, "name" : name}
return self.__raw_execute_and_parse(action, args, contents)
@@ -187,6 +198,9 @@
def upload_input(self, path):
return self.__upload_file("upload_input", path)
+ def upload_extra_input(self, path, relative_name):
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
def upload_config_file(self, path, contents):
return self.__upload_contents("upload_config_file", path, contents)
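The staging change above locates a dataset's extra files by replacing the trailing '.dat' of the input path with '_files' and uploads each file under a name relative to that directory. A small sketch of the path arithmetic, with made-up paths; extra_file_uploads is a hypothetical helper, not part of the runner.

import os

def extra_file_uploads(input_file):
    # For /data/dataset_7.dat, look in /data/dataset_7_files and return
    # (absolute path, relative upload name) pairs, matching how
    # upload_extra_input() is invoked in the diff above.
    files_path = "%s_files" % input_file[0:-len(".dat")]
    if not os.path.exists(files_path):
        return []
    relative_path = os.path.basename(files_path)
    pairs = []
    for extra_file in os.listdir(files_path):
        extra_file_path = os.path.join(files_path, extra_file)
        pairs.append((extra_file_path, os.path.join(relative_path, extra_file)))
    return pairs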
https://bitbucket.org/galaxy/galaxy-central/changeset/93dd6202175a/
changeset: 93dd6202175a
user: jmchilton
date: 2012-12-26 19:42:31
summary: Refactor much of the lwr client code out into its own module. This will make it easier to keep the content in sync with the client code from the lwr source.
---
lib/galaxy/jobs/runners/lwr.py | 256 +-----------------------
lib/galaxy/jobs/runners/lwr_client/__init__.py | 246 +++++++++++++++++++++++
2 files changed, 249 insertions(+), 253 deletions(-)
create mode 100644 lib/galaxy/jobs/runners/lwr_client/__init__.py
affected #: 2 files
diff -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -1,268 +1,18 @@
import logging
import subprocess
-from Queue import Queue
-import threading
-
-import re
from galaxy import model
-from galaxy.datatypes.data import nice_size
from galaxy.jobs.runners import ClusterJobState, ClusterJobRunner
-import os, errno
+import errno
from time import sleep
+from lwr_client import FileStager, Client
+
log = logging.getLogger( __name__ )
__all__ = [ 'LwrJobRunner' ]
-import urllib
-import urllib2
-import httplib
-import mmap
-import tempfile
-import time
-
-import simplejson
-
-class FileStager(object):
-
- def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
- self.client = client
- self.command_line = command_line
- self.config_files = config_files
- self.input_files = input_files
- self.output_files = output_files
- self.tool_dir = os.path.abspath(tool_dir)
- self.working_directory = working_directory
-
- self.file_renames = {}
-
- job_config = client.setup()
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
-
- self.__initialize_referenced_tool_files()
- self.__upload_tool_files()
- self.__upload_input_files()
- self.__upload_working_directory_files()
- self.__initialize_output_file_renames()
- self.__initialize_task_output_file_renames()
- self.__initialize_config_file_renames()
- self.__rewrite_and_upload_config_files()
- self.__rewrite_command_line()
-
- def __initialize_referenced_tool_files(self):
- pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
- referenced_tool_files = []
- referenced_tool_files += re.findall(pattern, self.command_line)
- if self.config_files != None:
- for config_file in self.config_files:
- referenced_tool_files += re.findall(pattern, self.__read(config_file))
- self.referenced_tool_files = referenced_tool_files
-
- def __upload_tool_files(self):
- for referenced_tool_file in self.referenced_tool_files:
- tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
- self.file_renames[referenced_tool_file] = tool_upload_response['path']
-
- def __upload_input_files(self):
- for input_file in self.input_files:
- input_upload_response = self.client.upload_input(input_file)
- self.file_renames[input_file] = input_upload_response['path']
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
-
- def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be uploaded
- for working_directory_file in os.listdir(self.working_directory):
- path = os.path.join(self.working_directory, working_directory_file)
- working_file_response = self.client.upload_working_directory_file(path)
- self.file_renames[path] = working_file_response['path']
-
- def __initialize_output_file_renames(self):
- for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
- os.path.basename(output_file))
-
- def __initialize_task_output_file_renames(self):
- for output_file in self.output_files:
- name = os.path.basename(output_file)
- self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- name)
-
- def __initialize_config_file_renames(self):
- for config_file in self.config_files:
- self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- os.path.basename(config_file))
-
- def __rewrite_paths(self, contents):
- new_contents = contents
- for local_path, remote_path in self.file_renames.iteritems():
- new_contents = new_contents.replace(local_path, remote_path)
- return new_contents
-
- def __rewrite_and_upload_config_files(self):
- for config_file in self.config_files:
- config_contents = self.__read(config_file)
- new_config_contents = self.__rewrite_paths(config_contents)
- self.client.upload_config_file(config_file, new_config_contents)
-
- def __rewrite_command_line(self):
- self.rewritten_command_line = self.__rewrite_paths(self.command_line)
-
- def get_rewritten_command_line(self):
- return self.rewritten_command_line
-
- def __read(self, path):
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
-
-
-
-class Client(object):
- """
- """
- """
- """
- def __init__(self, remote_host, job_id, private_key=None):
- if not remote_host.endswith("/"):
- remote_host = remote_host + "/"
- ## If we don't have an explicit private_key defined, check for
- ## one embedded in the URL. A URL of the form
- ## https://moo@cow:8913 will try to contact https://cow:8913
- ## with a private key of moo
- private_key_format = "https?://(.*)@.*/?"
- private_key_match= re.match(private_key_format, remote_host)
- if not private_key and private_key_match:
- private_key = private_key_match.group(1)
- remote_host = remote_host.replace("%s@" % private_key, '', 1)
- self.remote_host = remote_host
- self.job_id = job_id
- self.private_key = private_key
-
- def url_open(self, request, data):
- return urllib2.urlopen(request, data)
-
- def __build_url(self, command, args):
- if self.private_key:
- args["private_key"] = self.private_key
- data = urllib.urlencode(args)
- url = self.remote_host + command + "?" + data
- return url
-
- def __raw_execute(self, command, args = {}, data = None):
- url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self.url_open(request, data)
- return response
-
- def __raw_execute_and_parse(self, command, args = {}, data = None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
- def __upload_file(self, action, path, name=None, contents = None):
- """ """
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
- if not name:
- name = os.path.basename(path)
- args = {"job_id" : self.job_id, "name" : name}
- return self.__raw_execute_and_parse(action, args, contents)
-
- def upload_tool_file(self, path):
- return self.__upload_file("upload_tool_file", path)
-
- def upload_input(self, path):
- return self.__upload_file("upload_input", path)
-
- def upload_extra_input(self, path, relative_name):
- return self.__upload_file("upload_extra_input", path, name=relative_name)
-
- def upload_config_file(self, path, contents):
- return self.__upload_contents("upload_config_file", path, contents)
-
- def upload_working_directory_file(self, path):
- return self.__upload_file("upload_working_directory_file", path)
-
- def _get_output_type(self, name):
- return self.__raw_execute_and_parse('get_output_type', {'name': name,
- 'job_id': self.job_id})
-
- def download_output(self, path, working_directory):
- """ """
- name = os.path.basename(path)
- output_type = self._get_output_type(name)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id,
- 'output_type': output_type})
- if output_type == 'direct':
- output = open(path, 'wb')
- elif output_type == 'task':
- output = open(os.path.join(working_directory, name), 'wb')
- else:
- raise Exception("No remote output found for dataset with path %s" % path)
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output.write(buffer)
- finally:
- output.close()
-
- def launch(self, command_line):
- """ """
- return self.__raw_execute("launch", {"command_line" : command_line,
- "job_id" : self.job_id})
-
- def kill(self):
- return self.__raw_execute("kill", {"job_id" : self.job_id})
-
- def wait(self):
- """ """
- while True:
- complete = self.check_complete()
- if complete:
- return check_complete_response
- time.sleep(1)
-
- def raw_check_complete(self):
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
- return check_complete_response
-
- def check_complete(self):
- return self.raw_check_complete()["complete"] == "true"
-
- def clean(self):
- self.__raw_execute("clean", { "job_id" : self.job_id })
-
- def setup(self):
- return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
-
-
class LwrJobRunner( ClusterJobRunner ):
"""
diff -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd lib/galaxy/jobs/runners/lwr_client/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -0,0 +1,246 @@
+import mmap
+import os
+import re
+import time
+import urllib
+import urllib2
+
+import simplejson
+
+
+class FileStager(object):
+
+ def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
+ self.client = client
+ self.command_line = command_line
+ self.config_files = config_files
+ self.input_files = input_files
+ self.output_files = output_files
+ self.tool_dir = os.path.abspath(tool_dir)
+ self.working_directory = working_directory
+
+ self.file_renames = {}
+
+ job_config = client.setup()
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+
+ self.__initialize_referenced_tool_files()
+ self.__upload_tool_files()
+ self.__upload_input_files()
+ self.__upload_working_directory_files()
+ self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
+ self.__initialize_config_file_renames()
+ self.__rewrite_and_upload_config_files()
+ self.__rewrite_command_line()
+
+ def __initialize_referenced_tool_files(self):
+ pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
+ referenced_tool_files = []
+ referenced_tool_files += re.findall(pattern, self.command_line)
+ if self.config_files != None:
+ for config_file in self.config_files:
+ referenced_tool_files += re.findall(pattern, self.__read(config_file))
+ self.referenced_tool_files = referenced_tool_files
+
+ def __upload_tool_files(self):
+ for referenced_tool_file in self.referenced_tool_files:
+ tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
+ self.file_renames[referenced_tool_file] = tool_upload_response['path']
+
+ def __upload_input_files(self):
+ for input_file in self.input_files:
+ input_upload_response = self.client.upload_input(input_file)
+ self.file_renames[input_file] = input_upload_response['path']
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be uploaded
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
+ def __initialize_output_file_renames(self):
+ for output_file in self.output_files:
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
+ os.path.basename(output_file))
+
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
+ def __initialize_config_file_renames(self):
+ for config_file in self.config_files:
+ self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ os.path.basename(config_file))
+
+ def __rewrite_paths(self, contents):
+ new_contents = contents
+ for local_path, remote_path in self.file_renames.iteritems():
+ new_contents = new_contents.replace(local_path, remote_path)
+ return new_contents
+
+ def __rewrite_and_upload_config_files(self):
+ for config_file in self.config_files:
+ config_contents = self.__read(config_file)
+ new_config_contents = self.__rewrite_paths(config_contents)
+ self.client.upload_config_file(config_file, new_config_contents)
+
+ def __rewrite_command_line(self):
+ self.rewritten_command_line = self.__rewrite_paths(self.command_line)
+
+ def get_rewritten_command_line(self):
+ return self.rewritten_command_line
+
+ def __read(self, path):
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
+
+
+
+class Client(object):
+ """
+ """
+ """
+ """
+ def __init__(self, remote_host, job_id, private_key=None):
+ if not remote_host.endswith("/"):
+ remote_host = remote_host + "/"
+ ## If we don't have an explicit private_key defined, check for
+ ## one embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_key_format = "https?://(.*)@.*/?"
+ private_key_match= re.match(private_key_format, remote_host)
+ if not private_key and private_key_match:
+ private_key = private_key_match.group(1)
+ remote_host = remote_host.replace("%s@" % private_key, '', 1)
+ self.remote_host = remote_host
+ self.job_id = job_id
+ self.private_key = private_key
+
+ def url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
+ data = urllib.urlencode(args)
+ url = self.remote_host + command + "?" + data
+ return url
+
+ def __raw_execute(self, command, args = {}, data = None):
+ url = self.__build_url(command, args)
+ request = urllib2.Request(url=url, data=data)
+ response = self.url_open(request, data)
+ return response
+
+ def __raw_execute_and_parse(self, command, args = {}, data = None):
+ response = self.__raw_execute(command, args, data)
+ return simplejson.loads(response.read())
+
+ def __upload_file(self, action, path, name=None, contents = None):
+ """ """
+ input = open(path, 'rb')
+ try:
+ mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
+ return self.__upload_contents(action, path, mmapped_input, name)
+ finally:
+ input.close()
+
+ def __upload_contents(self, action, path, contents, name=None):
+ if not name:
+ name = os.path.basename(path)
+ args = {"job_id" : self.job_id, "name" : name}
+ return self.__raw_execute_and_parse(action, args, contents)
+
+ def upload_tool_file(self, path):
+ return self.__upload_file("upload_tool_file", path)
+
+ def upload_input(self, path):
+ return self.__upload_file("upload_input", path)
+
+ def upload_extra_input(self, path, relative_name):
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
+ def upload_config_file(self, path, contents):
+ return self.__upload_contents("upload_config_file", path, contents)
+
+ def upload_working_directory_file(self, path):
+ return self.__upload_file("upload_working_directory_file", path)
+
+ def _get_output_type(self, name):
+ return self.__raw_execute_and_parse('get_output_type', {'name': name,
+ 'job_id': self.job_id})
+
+ def download_output(self, path, working_directory):
+ """ """
+ name = os.path.basename(path)
+ output_type = self._get_output_type(name)
+ response = self.__raw_execute('download_output', {'name' : name,
+ "job_id" : self.job_id,
+ 'output_type': output_type})
+ if output_type == 'direct':
+ output = open(path, 'wb')
+ elif output_type == 'task':
+ output = open(os.path.join(working_directory, name), 'wb')
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
+ try:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ finally:
+ output.close()
+
+ def launch(self, command_line):
+ """ """
+ return self.__raw_execute("launch", {"command_line" : command_line,
+ "job_id" : self.job_id})
+
+ def kill(self):
+ return self.__raw_execute("kill", {"job_id" : self.job_id})
+
+ def wait(self):
+ """ """
+ while True:
+ complete = self.check_complete()
+ if complete:
+ return check_complete_response
+ time.sleep(1)
+
+ def raw_check_complete(self):
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ return check_complete_response
+
+ def check_complete(self):
+ return self.raw_check_complete()["complete"] == "true"
+
+ def clean(self):
+ self.__raw_execute("clean", { "job_id" : self.job_id })
+
+ def setup(self):
+ return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
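With the client code split into its own package, the two exported classes are enough to drive a remote LWR server. The sketch below shows roughly how the runner uses them after this refactoring; it assumes Galaxy's lib directory is on the Python path and an LWR server listening at the URL shown, and every concrete path, URL, and id is a placeholder.

from galaxy.jobs.runners.lwr_client import Client, FileStager

client = Client("http://lwr.example.org:8913/", job_id="123")
# Constructing the stager performs setup and uploads the referenced files.
stager = FileStager(client,
                    command_line="wrapper.sh /data/input_1.dat /data/output_1.dat",
                    config_files=[],
                    input_files=["/data/input_1.dat"],
                    output_files=["/data/output_1.dat"],
                    tool_dir="/galaxy/tools/example_tool",
                    working_directory="/galaxy/jobs/000/123")
client.launch(stager.get_rewritten_command_line())   # start the remote job
client.wait()                                        # poll until it completes
client.download_output("/data/output_1.dat",
                       working_directory="/galaxy/jobs/000/123")
client.clean()                                       # remove remote staging files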
https://bitbucket.org/galaxy/galaxy-central/changeset/50c1edba7fe0/
changeset: 50c1edba7fe0
user: jmchilton
date: 2012-12-26 19:42:31
summary: Documentation and PEP8 fixes for lwr client code.
---
lib/galaxy/jobs/runners/lwr_client/__init__.py | 206 +++++++++++++++++++-----
1 file changed, 164 insertions(+), 42 deletions(-)
affected #: 1 file
diff -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -1,3 +1,10 @@
+"""
+lwr_client
+==========
+
+This module contains logic for interfacing with an external LWR server.
+
+"""
import mmap
import os
import re
@@ -9,8 +16,32 @@
class FileStager(object):
-
+ """
+ Objects of the FileStager class interact with an LWR client object to
+ stage the files required to run jobs on a remote LWR server.
+
+ **Parameters**
+
+ client : Client
+ LWR client object.
+ command_line : str
+ The local command line to execute, this will be rewritten for the remote server.
+ config_files : list
+ List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
+ input_files : list
+ List of input files used by job. These will be transferred and references rewritten.
+ output_files : list
+ List of output_files produced by job.
+ tool_dir : str
+ Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
+ working_directory : str
+ Local path created by Galaxy for running this job.
+
+ """
+
def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
+ """
+ """
self.client = client
self.command_line = command_line
self.config_files = config_files
@@ -67,7 +98,8 @@
self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be uploaded
+ # Task manager stages files into working directory, these need to be
+ # uploaded if present.
for working_directory_file in os.listdir(self.working_directory):
path = os.path.join(self.working_directory, working_directory_file)
working_file_response = self.client.upload_working_directory_file(path)
@@ -75,8 +107,8 @@
def __initialize_output_file_renames(self):
for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
os.path.basename(output_file))
def __initialize_task_output_file_renames(self):
@@ -108,6 +140,10 @@
self.rewritten_command_line = self.__rewrite_paths(self.command_line)
def get_rewritten_command_line(self):
+ """
+ Returns the rewritten version of the command line to execute suitable
+ for remote host.
+ """
return self.rewritten_command_line
def __read(self, path):
@@ -117,13 +153,21 @@
finally:
input.close()
-
-
+
class Client(object):
- """
"""
- """
+ Objects of this client class perform low-level communication with a remote LWR server.
+
+ **Parameters**
+
+ remote_host : str
+ Remote URL of the LWR server.
+ job_id : str
+ Galaxy job/task id.
+ private_key : str (optional)
+ Secret key the remote LWR server is configured with.
"""
+
def __init__(self, remote_host, job_id, private_key=None):
if not remote_host.endswith("/"):
remote_host = remote_host + "/"
@@ -132,7 +176,7 @@
## https://moo@cow:8913 will try to contact https://cow:8913
## with a private key of moo
private_key_format = "https?://(.*)@.*/?"
- private_key_match= re.match(private_key_format, remote_host)
+ private_key_match = re.match(private_key_format, remote_host)
if not private_key and private_key_match:
private_key = private_key_match.group(1)
remote_host = remote_host.replace("%s@" % private_key, '', 1)
@@ -140,9 +184,9 @@
self.job_id = job_id
self.private_key = private_key
- def url_open(self, request, data):
+ def _url_open(self, request, data):
return urllib2.urlopen(request, data)
-
+
def __build_url(self, command, args):
if self.private_key:
args["private_key"] = self.private_key
@@ -150,21 +194,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args = {}, data = None):
+ def __raw_execute(self, command, args={}, data=None):
url = self.__build_url(command, args)
request = urllib2.Request(url=url, data=data)
- response = self.url_open(request, data)
+ response = self._url_open(request, data)
return response
- def __raw_execute_and_parse(self, command, args = {}, data = None):
+ def __raw_execute_and_parse(self, command, args={}, data=None):
response = self.__raw_execute(command, args, data)
return simplejson.loads(response.read())
- def __upload_file(self, action, path, name=None, contents = None):
- """ """
+ def __upload_file(self, action, path, name=None, contents=None):
input = open(path, 'rb')
try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
+ mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
return self.__upload_contents(action, path, mmapped_input, name)
finally:
input.close()
@@ -172,39 +215,93 @@
def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
- args = {"job_id" : self.job_id, "name" : name}
+ args = {"job_id": self.job_id, "name": name}
return self.__raw_execute_and_parse(action, args, contents)
-
+
def upload_tool_file(self, path):
+ """
+ Upload a tool related file (e.g. wrapper) required to run job.
+
+ **Parameters**
+
+ path : str
+ Local path tool.
+ """
return self.__upload_file("upload_tool_file", path)
def upload_input(self, path):
+ """
+ Upload input dataset to remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of input dataset.
+ """
return self.__upload_file("upload_input", path)
def upload_extra_input(self, path, relative_name):
+ """
+ Upload extra input file to remote server.
+
+ **Parameters**
+
+ path : str
+ Extra files path of input dataset corresponding to this input.
+ relative_name : str
+ Relative path of extra file to upload relative to inputs extra files path.
+ """
return self.__upload_file("upload_extra_input", path, name=relative_name)
def upload_config_file(self, path, contents):
+ """
+ Upload a job's config file to the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path to the original config file.
+ contents : str
+ Rewritten contents of the config file to upload.
+ """
return self.__upload_contents("upload_config_file", path, contents)
def upload_working_directory_file(self, path):
+ """
+ Upload the supplied file (path) from a job's working directory
+ to remote server.
+
+ **Parameters**
+
+ path : str
+ Path to file to upload.
+ """
return self.__upload_file("upload_working_directory_file", path)
def _get_output_type(self, name):
- return self.__raw_execute_and_parse('get_output_type', {'name': name,
- 'job_id': self.job_id})
+ return self.__raw_execute_and_parse("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_output(self, path, working_directory):
- """ """
+ """
+ Download an output dataset from the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of the dataset.
+ working_directory : str
+ Local working_directory for the job.
+ """
name = os.path.basename(path)
output_type = self._get_output_type(name)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id,
- 'output_type': output_type})
- if output_type == 'direct':
- output = open(path, 'wb')
- elif output_type == 'task':
- output = open(os.path.join(working_directory, name), 'wb')
+ response = self.__raw_execute("download_output", {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type})
+ if output_type == "direct":
+ output = open(path, "wb")
+ elif output_type == "task":
+ output = open(os.path.join(working_directory, name), "wb")
else:
raise Exception("No remote output found for dataset with path %s" % path)
try:
@@ -215,32 +312,57 @@
output.write(buffer)
finally:
output.close()
-
+
def launch(self, command_line):
- """ """
- return self.__raw_execute("launch", {"command_line" : command_line,
- "job_id" : self.job_id})
+ """
+ Run or queue up the execution of the supplied
+ `command_line` on the remote server.
+
+ **Parameters**
+
+ command_line : str
+ Command to execute.
+ """
+ return self.__raw_execute("launch", {"command_line": command_line,
+ "job_id": self.job_id})
def kill(self):
- return self.__raw_execute("kill", {"job_id" : self.job_id})
-
+ """
+ Cancel remote job, either removing from the queue or killing it.
+ """
+ return self.__raw_execute("kill", {"job_id": self.job_id})
+
def wait(self):
- """ """
+ """
+ Wait for job to finish.
+ """
while True:
- complete = self.check_complete()
- if complete:
- return check_complete_response
+ complete_response = self.raw_check_complete()
+ if complete_response["complete"] == "true":
+ return complete_response
time.sleep(1)
def raw_check_complete(self):
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ """
+ Get check_complete response from the remote server.
+ """
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
+ """
+ Return boolean indicating whether the job is complete.
+ """
return self.raw_check_complete()["complete"] == "true"
def clean(self):
- self.__raw_execute("clean", { "job_id" : self.job_id })
+ """
+ Cleanup the remote job.
+ """
+ self.__raw_execute("clean", {"job_id": self.job_id})
def setup(self):
- return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
+ """
+ Setup remote LWR server to run this job.
+ """
+ return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
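One behavior documented above is worth calling out: the Client constructor accepts the LWR secret embedded in the runner URL itself (https://moo@cow:8913 contacts https://cow:8913 with private key "moo"). A stand-alone sketch of that parsing, using the same regex as the constructor; split_private_key is a hypothetical name used only for illustration.

import re

def split_private_key(remote_host, private_key=None):
    # A URL like https://moo@cow:8913 yields host https://cow:8913/ and
    # private key "moo"; an explicitly supplied private_key takes precedence.
    if not remote_host.endswith("/"):
        remote_host = remote_host + "/"
    match = re.match(r"https?://(.*)@.*/?", remote_host)
    if not private_key and match:
        private_key = match.group(1)
        remote_host = remote_host.replace("%s@" % private_key, "", 1)
    return remote_host, private_key

print(split_private_key("https://moo@cow:8913"))  # ('https://cow:8913/', 'moo')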
https://bitbucket.org/galaxy/galaxy-central/changeset/5822038f8c96/
changeset: 5822038f8c96
user: jmchilton
date: 2012-12-26 19:42:31
summary: Extend lwr to allow execution of jobs with outputs specified using 'from_work_dir'.
---
lib/galaxy/jobs/runners/__init__.py | 72 +++++++++++++-----------
lib/galaxy/jobs/runners/lwr.py | 7 ++-
lib/galaxy/jobs/runners/lwr_client/__init__.py | 31 ++++++++--
3 files changed, 72 insertions(+), 38 deletions(-)
affected #: 3 files
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -8,7 +8,7 @@
log = logging.getLogger( __name__ )
class BaseJobRunner( object ):
- def build_command_line( self, job_wrapper, include_metadata=False ):
+ def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
"""
Compose the sequence of commands necessary to execute a job. This will
currently include:
@@ -19,18 +19,6 @@
- commands to set metadata (if include_metadata is True)
"""
- def in_directory( file, directory ):
- """
- Return true, if the common prefix of both is equal to directory
- e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
- """
-
- # Make both absolute.
- directory = os.path.abspath( directory )
- file = os.path.abspath( file )
-
- return os.path.commonprefix( [ file, directory ] ) == directory
-
commands = job_wrapper.get_command_line()
# All job runners currently handle this case which should never
# occur
@@ -47,6 +35,41 @@
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
# -- Append commands to copy job outputs based on from_work_dir attribute. --
+ if include_work_dir_outputs:
+ work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
+ if work_dir_outputs:
+ commands += "; " + "; ".join( [ "cp %s %s" % ( source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+
+ # Append metadata setting commands, we don't want to overwrite metadata
+ # that was copied over in init_meta(), as per established behavior
+ if include_metadata and self.app.config.set_metadata_externally:
+ commands += "; cd %s; " % os.path.abspath( os.getcwd() )
+ commands += job_wrapper.setup_external_metadata(
+ exec_dir = os.path.abspath( os.getcwd() ),
+ tmp_dir = job_wrapper.working_directory,
+ dataset_files_path = self.app.model.Dataset.file_path,
+ output_fnames = job_wrapper.get_output_fnames(),
+ set_extension = False,
+ kwds = { 'overwrite' : False } )
+ return commands
+
+ def get_work_dir_outputs( self, job_wrapper ):
+ """
+ Returns list of pairs (source_file, destination) describing path
+ to work_dir output file and ultimate destination.
+ """
+
+ def in_directory( file, directory ):
+ """
+ Return true, if the common prefix of both is equal to directory
+ e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
+ """
+
+ # Make both absolute.
+ directory = os.path.abspath( directory )
+ file = os.path.abspath( file )
+
+ return os.path.commonprefix( [ file, directory ] ) == directory
# Set up dict of dataset id --> output path; output path can be real or
# false depending on outputs_to_working_directory
@@ -57,6 +80,7 @@
path = dataset_path.false_path
output_paths[ dataset_path.dataset_id ] = path
+ output_pairs = []
# Walk job's output associations to find and use from_work_dir attributes.
job = job_wrapper.get_job()
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
@@ -72,30 +96,14 @@
source_file = os.path.join( os.path.abspath( job_wrapper.working_directory ), hda_tool_output.from_work_dir )
destination = output_paths[ dataset.dataset_id ]
if in_directory( source_file, job_wrapper.working_directory ):
- try:
- commands += "; cp %s %s" % ( source_file, destination )
- log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
- except ( IOError, OSError ):
- log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, destination ) )
+ output_pairs.append( ( source_file, destination ) )
+ log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
+ return output_pairs
-
- # Append metadata setting commands, we don't want to overwrite metadata
- # that was copied over in init_meta(), as per established behavior
- if include_metadata and self.app.config.set_metadata_externally:
- commands += "; cd %s; " % os.path.abspath( os.getcwd() )
- commands += job_wrapper.setup_external_metadata(
- exec_dir = os.path.abspath( os.getcwd() ),
- tmp_dir = job_wrapper.working_directory,
- dataset_files_path = self.app.model.Dataset.file_path,
- output_fnames = job_wrapper.get_output_fnames(),
- set_extension = False,
- kwds = { 'overwrite' : False } )
- return commands
-
class ClusterJobState( object ):
"""
Encapsulate the state of a cluster job, this should be subclassed as
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -54,7 +54,7 @@
if 0 != os.system(cmd):
raise Exception('Error running file staging command: %s' % cmd)
job_wrapper.prepare_input_files_cmds = None # prevent them from being used in-line
- command_line = self.build_command_line( job_wrapper, include_metadata=False )
+ command_line = self.build_command_line( job_wrapper, include_metadata=False, include_work_dir_outputs=False )
except:
job_wrapper.fail( "failure preparing job", exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
@@ -126,7 +126,12 @@
stderr = run_results['stderr']
if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
+ work_dir_outputs = self.get_work_dir_outputs(job_wrapper)
output_files = self.get_output_files(job_wrapper)
+ for source_file, output_file in work_dir_outputs:
+ client.download_work_dir_output(source_file, job_wrapper.working_directory, output_file)
+ # Remove from full output_files list so don't try to download directly.
+ output_files.remove(output_file)
for output_file in output_files:
client.download_output(output_file, working_directory=job_wrapper.working_directory)
client.clean()
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -282,6 +282,24 @@
return self.__raw_execute_and_parse("get_output_type", {"name": name,
"job_id": self.job_id})
+ def download_work_dir_output(self, source, working_directory, output_path):
+ """
+ Download an output dataset specified with from_work_dir from the
+ remote server.
+
+ **Parameters**
+
+ source : str
+ Path in job's working_directory to find output in.
+ working_directory : str
+ Local working_directory for the job.
+ output_path : str
+ Full path to output dataset.
+ """
+ output = open(output_path, "wb")
+ name = os.path.basename(source)
+ self.__raw_download_output(name, self.job_id, "work_dir", output)
+
def download_output(self, path, working_directory):
"""
Download an output dataset from the remote server.
@@ -295,23 +313,26 @@
"""
name = os.path.basename(path)
output_type = self._get_output_type(name)
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
if output_type == "direct":
output = open(path, "wb")
elif output_type == "task":
output = open(os.path.join(working_directory, name), "wb")
else:
raise Exception("No remote output found for dataset with path %s" % path)
+ self.__raw_download_output(name, self.job_id, output_type, output)
+
+ def __raw_download_output(self, name, job_id, output_type, output_file):
+ response = self.__raw_execute("download_output", {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type})
try:
while True:
buffer = response.read(1024)
if buffer == "":
break
- output.write(buffer)
+ output_file.write(buffer)
finally:
- output.close()
+ output_file.close()
def launch(self, command_line):
"""
https://bitbucket.org/galaxy/galaxy-central/changeset/6265bf3f27ad/
changeset: 6265bf3f27ad
user: jmchilton
date: 2012-12-26 19:42:31
summary: Implement an optimization that avoids transferring unneeded inputs to the remote LWR server. More general refactoring and testing of the lwr client code.
---
lib/galaxy/jobs/runners/lwr_client/__init__.py | 170 +++++++++++++++++++-----
1 file changed, 136 insertions(+), 34 deletions(-)
affected #: 1 file
diff -r 5822038f8c9677cbcaea60597a8d988cbd65b174 -r 6265bf3f27ad611db6c676e94166c25500c13432 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -15,6 +15,99 @@
import simplejson
+class JobInputs(object):
+ """
+ Abstractions over dynamic inputs created for a given job (namely the command to
+ execute and created configfiles).
+
+ **Parameters**
+
+ command_line : str
+ Local command to execute for this job. (To be rewritten.)
+ config_files : str
+ Config files created for this job. (To be rewritten.)
+
+
+ >>> import tempfile
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> def setup_inputs(tf):
+ ... open(tf.name, "w").write("world /path/to/input the rest")
+ ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... return inputs
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
+ >>> inputs.rewritten_command_line
+ 'hello C:\\\\input'
+ >>> inputs.rewritten_config_files[tf.name]
+ 'world C:\\\\input the rest'
+ >>> tf.close()
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.find_referenced_subfiles('/path/to')
+ ['/path/to/input']
+ >>> inputs.path_referenced('/path/to')
+ True
+ >>> inputs.path_referenced('/path/to/input')
+ True
+ >>> inputs.path_referenced('/path/to/notinput')
+ False
+ >>> tf.close()
+ """
+
+ def __init__(self, command_line, config_files):
+ self.rewritten_command_line = command_line
+ self.rewritten_config_files = {}
+ for config_file in config_files or []:
+ config_contents = _read(config_file)
+ self.rewritten_config_files[config_file] = config_contents
+
+ def find_referenced_subfiles(self, directory):
+ """
+ Return list of files below specified `directory` in job inputs. Could
+ use more sophisticated logic (match quotes to handle spaces, handle
+ subdirectories, etc...).
+
+ **Parameters**
+
+ directory : str
+ Full path to directory to search.
+
+ """
+ pattern = r"(%s%s\S+)" % (directory, os.sep)
+ referenced_files = set()
+ for input_contents in self.__items():
+ referenced_files.update(re.findall(pattern, input_contents))
+ return list(referenced_files)
+
+ def path_referenced(self, path):
+ pattern = r"%s" % path
+ found = False
+ for input_contents in self.__items():
+ if re.findall(pattern, input_contents):
+ found = True
+ break
+ return found
+
+ def rewrite_paths(self, local_path, remote_path):
+ """
+ Rewrite references to `local_path` with `remote_path` in job inputs.
+ """
+ self.__rewrite_command_line(local_path, remote_path)
+ self.__rewrite_config_files(local_path, remote_path)
+
+ def __rewrite_command_line(self, local_path, remote_path):
+ self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
+
+ def __rewrite_config_files(self, local_path, remote_path):
+ for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
+
+ def __items(self):
+ items = [self.rewritten_command_line]
+ items.extend(self.rewritten_config_files.values())
+ return items
+
+
class FileStager(object):
"""
Objects of the FileStager class interact with an LWR client object to
@@ -50,6 +143,10 @@
self.tool_dir = os.path.abspath(tool_dir)
self.working_directory = working_directory
+ # Setup job inputs, these will need to be rewritten before
+ # shipping off to remote LWR server.
+ self.job_inputs = JobInputs(self.command_line, self.config_files)
+
self.file_renames = {}
job_config = client.setup()
@@ -65,17 +162,11 @@
self.__initialize_output_file_renames()
self.__initialize_task_output_file_renames()
self.__initialize_config_file_renames()
- self.__rewrite_and_upload_config_files()
- self.__rewrite_command_line()
+ self.__handle_rewrites()
+ self.__upload_rewritten_config_files()
def __initialize_referenced_tool_files(self):
- pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
- referenced_tool_files = []
- referenced_tool_files += re.findall(pattern, self.command_line)
- if self.config_files != None:
- for config_file in self.config_files:
- referenced_tool_files += re.findall(pattern, self.__read(config_file))
- self.referenced_tool_files = referenced_tool_files
+ self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
def __upload_tool_files(self):
for referenced_tool_file in self.referenced_tool_files:
@@ -84,18 +175,25 @@
def __upload_input_files(self):
for input_file in self.input_files:
+ self.__upload_input_file(input_file)
+ self.__upload_input_extra_files(input_file)
+
+ def __upload_input_file(self, input_file):
+ if self.job_inputs.path_referenced(input_file):
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
+
+ def __upload_input_extra_files(self, input_file):
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
# Task manager stages files into working directory, these need to be
@@ -130,28 +228,20 @@
new_contents = new_contents.replace(local_path, remote_path)
return new_contents
- def __rewrite_and_upload_config_files(self):
- for config_file in self.config_files:
- config_contents = self.__read(config_file)
- new_config_contents = self.__rewrite_paths(config_contents)
+ def __handle_rewrites(self):
+ for local_path, remote_path in self.file_renames.iteritems():
+ self.job_inputs.rewrite_paths(local_path, remote_path)
+
+ def __upload_rewritten_config_files(self):
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
self.client.upload_config_file(config_file, new_config_contents)
- def __rewrite_command_line(self):
- self.rewritten_command_line = self.__rewrite_paths(self.command_line)
-
def get_rewritten_command_line(self):
"""
Returns the rewritten version of the command line to execute suitable
for remote host.
"""
- return self.rewritten_command_line
-
- def __read(self, path):
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
+ return self.job_inputs.rewritten_command_line
class Client(object):
@@ -387,3 +477,15 @@
Setup remote LWR server to run this job.
"""
return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+
+
+def _read(path):
+ """
+ Utility method to quickly read small files (config files and tool
+ wrappers) into memory as strings.
+ """
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
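The practical effect of the new JobInputs abstraction is that an input dataset is only uploaded when its path actually appears in the command line or a config file. A short sketch of that decision, reusing the JobInputs class added above; it assumes Galaxy's lib directory is on the Python path, and the file names are illustrative.

from galaxy.jobs.runners.lwr_client import JobInputs

inputs = JobInputs("wrapper.sh /data/input_1.dat /data/output_1.dat", config_files=[])
for input_file in ["/data/input_1.dat", "/data/input_2.dat"]:
    if inputs.path_referenced(input_file):
        print("would upload %s" % input_file)          # referenced by the command line
    else:
        print("skipping unreferenced %s" % input_file)  # never shipped to the LWR server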
https://bitbucket.org/galaxy/galaxy-central/changeset/875ac898df00/
changeset: 875ac898df00
user: jmchilton
date: 2012-12-26 19:42:31
summary: Rework job_id handling in the LWR runner, allowing the remote LWR server to assign a job_id during setup and saving it as the job's external id. This change allows multiple Galaxy instances to submit jobs to the same LWR backend server and will prove useful when implementing additional backends (pbs/drmaa/etc...) for the LWR server.
---
lib/galaxy/jobs/runners/lwr.py | 5 +++--
lib/galaxy/jobs/runners/lwr_client/__init__.py | 22 ++++++++++++++++------
2 files changed, 19 insertions(+), 8 deletions(-)
affected #: 2 files
diff -r 6265bf3f27ad611db6c676e94166c25500c13432 -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -73,8 +73,9 @@
working_directory = job_wrapper.working_directory
file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir, working_directory)
rebuilt_command_line = file_stager.get_rewritten_command_line()
+ job_id = file_stager.job_id
client.launch( rebuilt_command_line )
- job_wrapper.set_runner( runner_url, job_wrapper.job_id )
+ job_wrapper.set_runner( runner_url, job_id )
job_wrapper.change_state( model.Job.states.RUNNING )
except Exception, exc:
@@ -84,7 +85,7 @@
lwr_job_state = ClusterJobState()
lwr_job_state.job_wrapper = job_wrapper
- lwr_job_state.job_id = job_wrapper.job_id
+ lwr_job_state.job_id = job_id
lwr_job_state.old_state = True
lwr_job_state.running = True
lwr_job_state.runner_url = runner_url
diff -r 6265bf3f27ad611db6c676e94166c25500c13432 -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -149,12 +149,7 @@
self.file_renames = {}
- job_config = client.setup()
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
-
+ self.__handle_setup()
self.__initialize_referenced_tool_files()
self.__upload_tool_files()
self.__upload_input_files()
@@ -165,6 +160,21 @@
self.__handle_rewrites()
self.__upload_rewritten_config_files()
+ def __handle_setup(self):
+ job_config = self.client.setup()
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+ # If the remote LWR server assigned a job id, use it; otherwise
+ # fall back to the locally assigned job_id.
+ galaxy_job_id = self.client.job_id
+ self.job_id = job_config.get('job_id', galaxy_job_id)
+ if self.job_id != galaxy_job_id:
+ # The remote LWR server assigned an id different from the
+ # Galaxy job id; update the client to reflect this.
+ self.client.job_id = self.job_id
+
def __initialize_referenced_tool_files(self):
self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Display installed repository dependencies in a separate container from missing repository dependencies.
25 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4f4875265599/
changeset: 4f4875265599
user: greg
date: 2012-12-25 17:49:39
summary: Display installed repository dependencies in a separate container from missing repository dependencies.
affected #: 7 files
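The core of the change is a helper that walks a repository's immediate dependencies once and splits them by installation status, so installed and missing dependencies can be rendered in separate containers. A hedged sketch of that partitioning idea, with Dependency and INSTALLED as stand-ins for the Galaxy model objects and status values:

# Illustrative sketch only; not the Galaxy model API.
INSTALLED = 'Installed'

class Dependency(object):
    def __init__(self, name, status):
        self.name = name
        self.status = status

def partition_by_status(dependencies):
    installed, missing = [], []
    for dependency in dependencies:
        # Anything not fully installed is treated as missing for display purposes.
        if dependency.status == INSTALLED:
            installed.append(dependency)
        else:
            missing.append(dependency)
    return installed, missing

installed, missing = partition_by_status([ Dependency('emboss_datatypes', 'Installed'),
                                           Dependency('freebayes', 'Never installed') ])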
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3167,40 +3167,40 @@
def has_readme_files( self ):
return self.metadata and 'readme_files' in self.metadata
@property
- def required_repositories( self ):
+ def repository_dependencies( self ):
required_repositories = []
- for rrda in self.repository_dependencies:
+ for rrda in self.required_repositories:
repository_dependency = rrda.repository_dependency
required_repository = repository_dependency.repository
required_repositories.append( required_repository )
return required_repositories
@property
- def installed_required_repositories( self ):
+ def installed_repository_dependencies( self ):
"""Return the repository's repository dependencies that are currently installed."""
installed_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.INSTALLED:
installed_required_repositories.append( required_repository )
return installed_required_repositories
@property
- def missing_required_repositories( self ):
+ def missing_repository_dependencies( self ):
"""Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
missing_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status not in [ self.installation_status.INSTALLED ]:
missing_required_repositories.append( required_repository )
return missing_required_repositories
@property
- def required_repositories_being_installed( self ):
+ def repository_dependencies_being_installed( self ):
required_repositories_being_installed = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
required_repositories_being_installed.append( required_repository )
return required_repositories_being_installed
@property
- def required_repositories_missing_or_being_installed( self ):
+ def repository_dependencies_missing_or_being_installed( self ):
required_repositories_missing_or_being_installed = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status in [ self.installation_status.ERROR,
self.installation_status.INSTALLING,
self.installation_status.NEVER_INSTALLED,
@@ -3208,17 +3208,17 @@
required_repositories_missing_or_being_installed.append( required_repository )
return required_repositories_missing_or_being_installed
@property
- def required_repositories_with_installation_errors( self ):
+ def repository_dependencies_with_installation_errors( self ):
required_repositories_with_installation_errors = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.ERROR:
required_repositories_with_installation_errors.append( required_repository )
return required_repositories_with_installation_errors
@property
- def uninstalled_required_repositories( self ):
+ def uninstalled_repository_dependencies( self ):
"""Return the repository's repository dependencies that have been uninstalled."""
uninstalled_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.UNINSTALLED:
uninstalled_required_repositories.append( required_repository )
return uninstalled_required_repositories
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1758,8 +1758,8 @@
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
order_by=ToolDependency.table.c.name,
backref='tool_shed_repository' ),
- repository_dependencies=relation( RepositoryRepositoryDependencyAssociation,
- primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+ required_repositories=relation( RepositoryRepositoryDependencyAssociation,
+ primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
assign_mapper( context, RepositoryRepositoryDependencyAssociation, RepositoryRepositoryDependencyAssociation.table,
properties=dict( repository=relation( ToolShedRepository,
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -488,6 +488,40 @@
if idx == count:
break
return headers
+def get_installed_and_missing_repository_dependencies( trans, repository ):
+ missing_repository_dependencies = {}
+ installed_repository_dependencies = {}
+ if repository.has_repository_dependencies:
+ metadata = repository.metadata
+ installed_rd_tups = []
+ missing_rd_tups = []
+ # The repository dependencies container will include only the immediate repository dependencies of this repository, so
+ # the container will be only a single level in depth.
+ for rd in repository.repository_dependencies:
+ rd_tup = [ rd.tool_shed, rd.name, rd.owner, rd.changeset_revision, rd.id, rd.status ]
+ if rd.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
+ installed_rd_tups.append( rd_tup )
+ else:
+ missing_rd_tups.append( rd_tup )
+ if installed_rd_tups or missing_rd_tups:
+ # Get the description from the metadata in case it has a value.
+ repository_dependencies = metadata.get( 'repository_dependencies', {} )
+ description = repository_dependencies.get( 'description', None )
+ # We need to add a root_key entry to one or both of the installed_repository_dependencies and
+ # missing_repository_dependencies dictionaries for proper display parsing.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ if installed_rd_tups:
+ installed_repository_dependencies[ 'root_key' ] = root_key
+ installed_repository_dependencies[ root_key ] = installed_rd_tups
+ installed_repository_dependencies[ 'description' ] = description
+ if missing_rd_tups:
+ missing_repository_dependencies[ 'root_key' ] = root_key
+ missing_repository_dependencies[ root_key ] = missing_rd_tups
+ missing_repository_dependencies[ 'description' ] = description
+ return installed_repository_dependencies, missing_repository_dependencies
def get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ):
if all_tool_dependencies:
tool_dependencies = {}
@@ -861,8 +895,11 @@
"""
metadata = repository.metadata
if metadata:
+ # Handle proprietary datatypes.
datatypes = metadata.get( 'datatypes', None )
+ # Handle invalid tools.
invalid_tools = metadata.get( 'invalid_tools', None )
+ # Handle README files.
if repository.has_readme_files:
if reinstalling:
# Since we're reinstalling, we need to send a request to the tool shed to get the README files.
@@ -877,33 +914,20 @@
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
- repository_dependencies_dict_for_display = {}
- if repository.has_repository_dependencies:
- rd_tups = []
- # We need to add a root_key entry to the repository_dependencies dictionary for proper display parsing.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision )
- # The repository dependencies container will include only the immediate repository dependencies of this repository, so
- # the container will be only a single level in depth.
- for rr in repository.required_repositories:
- rd_tup = [ rr.tool_shed, rr.name, rr.owner, rr.changeset_revision, rr.id, rr.status ]
- rd_tups.append( rd_tup )
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- # Get the description from the metadata in case it has a value.
- repository_dependencies = metadata.get( 'repository_dependencies', {} )
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies.get( 'description', None )
+ # Handle repository dependencies.
+ installed_repository_dependencies, missing_repository_dependencies = get_installed_and_missing_repository_dependencies( trans, repository )
+ # Handle tool dependencies.
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ installed_tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
if reinstalling:
# All tool dependencies will be considered missing since we are reinstalling the repository.
- if tool_dependencies:
- for td in tool_dependencies:
+ if installed_tool_dependencies:
+ for td in installed_tool_dependencies:
missing_tool_dependencies.append( td )
- tool_dependencies = None
+ installed_tool_dependencies = None
+ # Handle valid tools.
valid_tools = metadata.get( 'tools', None )
+ # Handle workflows.
workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
@@ -913,10 +937,11 @@
repository=repository,
datatypes=datatypes,
invalid_tools=invalid_tools,
+ missing_repository_dependencies=missing_repository_dependencies,
missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies_dict_for_display,
- tool_dependencies=tool_dependencies,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows )
else:
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -76,14 +76,15 @@
log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
return readme_files_dict
def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
- invalid_tools, missing_tool_dependencies, readme_files_dict, repository_dependencies, tool_dependencies,
- valid_tools, workflows ):
+ invalid_tools, missing_repository_dependencies, missing_tool_dependencies, readme_files_dict,
+ repository_dependencies, tool_dependencies, valid_tools, workflows ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
missing_tool_dependencies=None,
readme_files=None,
repository_dependencies=None,
+ missing_repository_dependencies=None,
tool_dependencies=None,
valid_tools=None,
workflows=None )
@@ -121,7 +122,7 @@
if readme_files_dict:
folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
- # Repository dependencies container.
+ # Installed repository dependencies container.
if repository_dependencies:
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
toolshed_base_url=toolshed_base_url,
@@ -129,16 +130,31 @@
repository_owner=repository_owner,
changeset_revision=changeset_revision,
folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ repository_dependencies=repository_dependencies,
+ label='Installed repository dependencies',
+ installed=True )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Tool dependencies container.
+ # Missing repository dependencies container.
+ if missing_repository_dependencies:
+ folder_id, missing_repository_dependencies_root_folder = \
+ container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=missing_repository_dependencies,
+ label='Missing repository dependencies',
+ installed=False )
+ containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
+ # Installed tool dependencies container.
if tool_dependencies:
# We only want to display the Status column if the tool_dependency is missing.
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
label='Installed tool dependencies',
- display_status=False )
+ installed=True )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
@@ -147,7 +163,7 @@
folder_id,
missing_tool_dependencies,
label='Missing tool dependencies',
- display_status=True )
+ installed=False )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
@@ -210,7 +226,9 @@
repository_owner=repository.user.username,
changeset_revision=changeset_revision,
folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ repository_dependencies=repository_dependencies,
+ label='Repository dependencies',
+ installed=False )
if repository_dependencies_root_folder:
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Tool dependencies container.
@@ -219,7 +237,7 @@
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
- display_status=False )
+ installed=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -201,7 +201,7 @@
readme_files_root_folder = None
return folder_id, readme_files_root_folder
def build_repository_dependencies_folder( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
- label='Repository dependencies' ):
+ label='Repository dependencies', installed=False ):
"""Return a folder hierarchy containing repository dependencies."""
if repository_dependencies:
repository_dependency_id = 0
@@ -274,8 +274,11 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', display_status=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', installed=False ):
"""Return a folder hierarchy containing tool dependencies."""
+ # The status will be displayed only if the received installed flag is False. When this is the case, we're in Galaxy
+ # (not the tool shed), and the tool dependencies are not installed or are in an error state, so they are considered missing.
+ # In other words, the tool dependency status is displayed only when the tool dependency is not installed.
if tool_dependencies:
tool_dependency_id = 0
folder_id += 1
@@ -283,11 +286,10 @@
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
if trans.webapp.name == 'galaxy':
- if display_status:
- # The status will be displayed only if the tool dependency status is not 'Installed'.
+ if installed:
+ folder.description = 'click the name to browse the dependency installation directory'
+ else:
folder.description = 'click the name to install the missing dependency'
- else:
- folder.description = 'click the name to browse the dependency installation directory'
tool_dependencies_root_folder.folders.append( folder )
# Insert a header row.
tool_dependency_id += 1
@@ -297,17 +299,15 @@
name='Name',
version='Version',
type='Type' )
- if display_status:
+ if installed:
+ tool_dependency.install_dir = 'Install directory'
+ else:
tool_dependency.installation_status = 'Status'
- else:
- tool_dependency.install_dir = 'Install directory'
else:
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
version='Version',
type='Type' )
- if display_status:
- tool_dependency.installation_status = 'Status'
folder.tool_dependencies.append( tool_dependency )
for dependency_key, requirements_dict in tool_dependencies.items():
tool_dependency_id += 1
@@ -317,7 +317,7 @@
type = set_environment_dict[ 'type' ]
repository_id = set_environment_dict.get( 'repository_id', None )
td_id = set_environment_dict.get( 'tool_dependency_id', None )
- if display_status:
+ if trans.webapp.name == 'galaxy':
installation_status = set_environment_dict.get( 'status', None )
else:
installation_status = None
@@ -335,7 +335,7 @@
install_dir = requirements_dict.get( 'install_dir', None )
repository_id = requirements_dict.get( 'repository_id', None )
td_id = requirements_dict.get( 'tool_dependency_id', None )
- if display_status:
+ if trans.webapp.name == 'galaxy':
installation_status = requirements_dict.get( 'status', None )
else:
installation_status = None
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -44,7 +44,7 @@
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
bgcolor = trans.model.ToolShedRepository.states.WARNING
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- if tool_shed_repository.missing_required_repositories:
+ if tool_shed_repository.missing_repository_dependencies:
bgcolor = trans.model.ToolShedRepository.states.WARNING
status_label = '%s, missing repository dependencies' % status_label
elif tool_shed_repository.missing_tool_dependencies:
@@ -1202,6 +1202,7 @@
repository=None,
datatypes=None,
invalid_tools=None,
+ missing_repository_dependencies=None,
missing_tool_dependencies=None,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -472,15 +472,18 @@
<${cell_type} style="padding-left: ${pad+20}px;">
%if row_is_header:
${tool_dependency.name | h}
- %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.repository_id and not tool_dependency.installation_status:
- ## tool_dependency.installation_status will be None if the status value in the database is 'Installed'.
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
+ %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id:
+ %if tool_dependency.repository_id and tool_dependency.installation_status == 'Installed':
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
+ ${tool_dependency.name | h}
+ </a>
+ %elif tool_dependency.installation_status != 'Installed':
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
+ ${tool_dependency.name}
+ </a>
+ %else:
${tool_dependency.name | h}
- </a>
- %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.installation_status:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
- ${tool_dependency.name}
- </a>
+ %endif
%else:
${tool_dependency.name | h}
%endif
@@ -551,6 +554,7 @@
invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
readme_files_root_folder = containers_dict.get( 'readme_files', None )
repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
valid_tools_root_folder = containers_dict.get( 'valid_tools', none )
@@ -578,10 +582,17 @@
</div></div>
%endif
- %if repository_dependencies_root_folder or tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
+ %if missing_repository_dependencies_root_folder or repository_dependencies_root_folder or tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
<div class="toolForm"><div class="toolFormTitle">Dependencies of this repository</div><div class="toolFormBody">
+ %if missing_repository_dependencies_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_repository_dependencies">
+ ${render_folder( missing_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
%if repository_dependencies_root_folder:
<p/><% row_counter = RowCounter() %>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Slight enhancements for managing simple repository dependencies.
22 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a3915a264f6c/
changeset: a3915a264f6c
user: greg
date: 2012-12-22 21:12:41
summary: Slight enhancements for managing simple repository dependencies.
affected #: 3 files
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3014,6 +3014,7 @@
installation_status = Bunch( NEW='New',
CLONING='Cloning',
SETTING_TOOL_VERSIONS='Setting tool versions',
+ INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
INSTALLED='Installed',
@@ -3166,6 +3167,62 @@
def has_readme_files( self ):
return self.metadata and 'readme_files' in self.metadata
@property
+ def required_repositories( self ):
+ required_repositories = []
+ for rrda in self.repository_dependencies:
+ repository_dependency = rrda.repository_dependency
+ required_repository = repository_dependency.repository
+ required_repositories.append( required_repository )
+ return required_repositories
+ @property
+ def installed_required_repositories( self ):
+ """Return the repository's repository dependencies that are currently installed."""
+ installed_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.INSTALLED:
+ installed_required_repositories.append( required_repository )
+ return installed_required_repositories
+ @property
+ def missing_required_repositories( self ):
+ """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
+ missing_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status not in [ self.installation_status.INSTALLED ]:
+ missing_required_repositories.append( required_repository )
+ return missing_required_repositories
+ @property
+ def required_repositories_being_installed( self ):
+ required_repositories_being_installed = []
+ for required_repository in self.required_repositories:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+ required_repositories_being_installed.append( required_repository )
+ return required_repositories_being_installed
+ @property
+ def required_repositories_missing_or_being_installed( self ):
+ required_repositories_missing_or_being_installed = []
+ for required_repository in self.required_repositories:
+ if required_repository.status in [ self.installation_status.ERROR,
+ self.installation_status.INSTALLING,
+ self.installation_status.NEVER_INSTALLED,
+ self.installation_status.UNINSTALLED ]:
+ required_repositories_missing_or_being_installed.append( required_repository )
+ return required_repositories_missing_or_being_installed
+ @property
+ def required_repositories_with_installation_errors( self ):
+ required_repositories_with_installation_errors = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.ERROR:
+ required_repositories_with_installation_errors.append( required_repository )
+ return required_repositories_with_installation_errors
+ @property
+ def uninstalled_required_repositories( self ):
+ """Return the repository's repository dependencies that have been uninstalled."""
+ uninstalled_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.UNINSTALLED:
+ uninstalled_required_repositories.append( required_repository )
+ return uninstalled_required_repositories
+ @property
def installed_tool_dependencies( self ):
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -856,7 +856,8 @@
def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
- from Galaxy and not the tool shed.
+ from Galaxy (not the tool shed) when displaying repository dependencies for installed repositories and when displaying them for uninstalled
+ repositories that are being reinstalled.
"""
metadata = repository.metadata
if metadata:
@@ -876,43 +877,24 @@
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool shed repository metadata.
+ if repository.has_repository_dependencies:
+ rd_tups = []
+ # We need to add a root_key entry to the repository_dependencies dictionary for proper display parsing.
root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
repository.name,
repository.owner,
- repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- for index, rd_tup in enumerate( rd_tups ):
- # Get the id and the installation status of the required repository.
- tool_shed, name, owner, changeset_revision = rd_tup
- required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
- # TODO: Since the changeset revision defined in the tool shed repository's repository_dependencies.xml file may have a changeset_revision
- # value that is outdated, we ened to make a call to the tool shed get the update dchangeset revision if repository is still None here.
- if required_repository:
- rd_tup.append( required_repository.id )
- rd_tup.append( str( required_repository.status ) )
- else:
- # See above TODO. For now, we'll take a short cut and attempt to find the repository by name and owner only. This will not work long
- # term because multiple revisions of a reposiory with the same name and owner could be installed into a Galaxy instance. The long term
- # fix is to call get_update_to_changeset_revision_and_ctx_rev( trans, repository ) for each required repository.
- required_repository = trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
- trans.model.ToolShedRepository.table.c.owner == owner ) ) \
- .first()
- if required_repository:
- rd_tup.append( required_repository.id )
- rd_tup.append( str( required_repository.status ) )
- else:
- rd_tup.append( None )
- rd_tup.append( None )
- rd_tups[ index ] = rd_tup
+ repository.installed_changeset_revision )
+ # The repository dependencies container will include only the immediate repository dependencies of this repository, so
+ # the container will be only a single level in depth.
+ for rr in repository.required_repositories:
+ rd_tup = [ rr.tool_shed, rr.name, rr.owner, rr.changeset_revision, rr.id, rr.status ]
+ rd_tups.append( rd_tup )
repository_dependencies_dict_for_display[ 'root_key' ] = root_key
repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ # Get the description from the metadata in case it has a value.
+ repository_dependencies = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies.get( 'description', None )
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
if reinstalling:
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -32,6 +32,7 @@
status_label = tool_shed_repository.status
if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
bgcolor = trans.model.ToolShedRepository.states.INSTALLING
@@ -43,9 +44,12 @@
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
bgcolor = trans.model.ToolShedRepository.states.WARNING
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- if tool_shed_repository.missing_tool_dependencies:
+ if tool_shed_repository.missing_required_repositories:
bgcolor = trans.model.ToolShedRepository.states.WARNING
- status_label = '%s, missing dependencies' % status_label
+ status_label = '%s, missing repository dependencies' % status_label
+ elif tool_shed_repository.missing_tool_dependencies:
+ bgcolor = trans.model.ToolShedRepository.states.WARNING
+ status_label = '%s, missing tool dependencies' % status_label
else:
bgcolor = trans.model.ToolShedRepository.states.OK
else:
@@ -182,6 +186,7 @@
[ model.ToolShedRepository.installation_status.NEW,
model.ToolShedRepository.installation_status.CLONING,
model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES,
model.ToolShedRepository.installation_status.UNINSTALLED ], \
@@ -500,7 +505,7 @@
removed = False
if removed:
tool_shed_repository.uninstalled = True
- # Remove all installed tool dependencies.
+ # Remove all installed tool dependencies, but don't touch any repository dependencies.
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: james_taylor: Fix shutdown on python >= 2.6.2 by calling setDaemon when creating threads (these are still cleanly shut down by atexit). Also add descriptive names to most job worker threads
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/32ea53484cec/
changeset: 32ea53484cec
user: james_taylor
date: 2012-12-21 22:43:30
summary: Fix shutdown on python >= 2.6.2 by calling setDaemon when creating threads (these are still cleanly shut down by atexit). Also add descriptive names to most job worker threads
affected #: 7 files
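The fix boils down to two things applied to every worker thread: give it a descriptive name and mark it as a daemon before starting it, so a blocked worker can no longer keep the interpreter alive at exit. A minimal sketch of the pattern, assuming a trivial run_next() stand-in for the runners' real work loops:

# Illustrative sketch; ExampleRunner and run_next() are hypothetical.
import threading
import time

def run_next():
    while True:
        time.sleep(1)  # stand-in for pulling work off the runner's queue

threads = []
for i in range(4):
    worker = threading.Thread( name=( "ExampleRunner.work_threads-%d" % i ), target=run_next )
    worker.setDaemon( True )  # daemon threads no longer prevent the interpreter from exiting
    worker.start()
    threads.append( worker )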
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -61,7 +61,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
def start( self ):
"""
@@ -353,7 +354,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job handler stop queue started" )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -68,7 +68,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
# Recover jobs at startup
self.__check_jobs_at_startup()
# Start the queue
@@ -219,7 +220,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job manager stop queue started" )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -105,13 +105,14 @@
self.monitor_queue = Queue()
self.ds = drmaa.Session()
self.ds.initialize()
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="DRMAAJobRunner.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
self.work_queue = Queue()
self.work_threads = []
nworkers = app.config.cluster_job_queue_workers
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "DRMAAJobRunner.work_threads-%d" % i ), target=self.run_next )
worker.start()
self.work_threads.append( worker )
log.debug( "%d workers ready" % nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -37,7 +37,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "LocalJobRunner.threads-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -229,7 +229,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "LwrJobRunner.thread-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -29,7 +29,8 @@
nworkers = app.config.local_task_queue_workers
log.info( "Starting tasked-job runners" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "TaskedJobRunner-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -38,6 +38,8 @@
import traceback, sys
traceback.print_exc()
sys.exit( 1 )
+ # Call app's shutdown method when the interpreter exits; this cleanly stops
+ # the various Galaxy application daemon threads
atexit.register( app.shutdown )
# Create the universe WSGI application
webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Fixes for sorting and merging genomic regions during visual analysis.
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c54ebfe2c008/
changeset: c54ebfe2c008
user: jgoecks
date: 2012-12-21 22:08:16
summary: Fixes for sorting and merging genomic regions during visual analysis.
affected #: 1 file
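The approach in the diff below is: sort regions by (chromosome, start) so overlapping regions on the same chromosome become adjacent, then walk the sorted list and join a region into its predecessor whenever the two overlap, so no data is fetched twice. A hedged sketch of that strategy, with Region as a stand-in for the region objects handled by the tools API controller:

# Illustrative sketch only; Region and merge_regions() are hypothetical names.
class Region(object):
    def __init__(self, chrom, start, end):
        self.chrom, self.start, self.end = chrom, start, end

def merge_regions(regions):
    # Sort by (chrom, start) so regions on the same chromosome are adjacent.
    regions = sorted(regions, key=lambda r: (r.chrom.lower(), r.start))
    merged = []
    for region in regions:
        # Fold a region into its predecessor only when both share a chromosome and overlap.
        if merged and merged[-1].chrom == region.chrom and region.start <= merged[-1].end:
            merged[-1].end = max(merged[-1].end, region.end)
        else:
            merged.append(region)
    return merged

merged = merge_regions([ Region('chr1', 100, 200), Region('chr1', 150, 300), Region('chr2', 50, 80) ])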
diff -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -158,17 +158,16 @@
if len( regions ) > 1:
# Sort by chrom name, start so that data is not fetched out of order.
- regions.sort( key=lambda r: r.chrom )
- regions.sort( key=lambda r: r.start )
-
+ regions = sorted(regions, key=lambda r: (r.chrom.lower(), r.start))
+
# Merge overlapping regions so that regions do not overlap
# and hence data is not included multiple times.
prev = regions[0]
cur = regions[1]
index = 1
while True:
- if cur.start <= prev.end:
- # Found overlapping regions, so join them.
+ if cur.chrom == prev.chrom and cur.start <= prev.end:
+ # Found overlapping regions, so join them into prev.
prev.end = cur.end
del regions[ index ]
else:
@@ -182,7 +181,7 @@
break
else:
cur = regions[ index ]
-
+
run_on_regions = True
# Dataset check.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Tool shed functional test enhancements. Functional tests for uninstalling, deactivating, reinstalling, and reactivating installed repositories.
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/692a1e8b6999/
changeset: 692a1e8b6999
user: inithello
date: 2012-12-21 18:47:30
summary: Tool shed functional test enhancements. Functional tests for uninstalling, deactivating, reinstalling, and reactivating installed repositories.
affected #: 9 files
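One of the new helpers, get_datatypes_count(), scrapes the datatypes registry page and extracts the advertised count with a regular expression so the tests can compare the registry size before and after installing a repository. A small sketch of that check, with fabricated HTML snippets and a hypothetical count_datatypes() helper (note the cast to int so the before/after comparison is numeric rather than lexical):

# Illustrative sketch only; the HTML strings below are fabricated.
import re

def count_datatypes(html):
    match = re.search( 'registry contains (\d+) data types', html )
    if match:
        return int( match.group( 1 ) )
    return None

old_count = count_datatypes( 'This registry contains 93 data types.' )
new_count = count_datatypes( 'This registry contains 110 data types.' )
assert new_count > old_count, 'Installing the repository did not add datatypes'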
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -68,13 +68,10 @@
for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
- """
- TODO: Uncomment these when Greg enhances the tool dependencies and missing tool dependencies containers to display the status.
if dependencies_installed:
strings_displayed.append( 'Installed' )
else:
strings_displayed.append( 'Never installed' )
- """
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
@@ -322,6 +319,14 @@
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ def get_datatypes_count( self ):
+ url = '/admin/view_datatypes_registry'
+ self.visit_galaxy_url( url )
+ html = self.last_page()
+ datatypes_count = re.search( 'registry contains (\d+) data types', html )
+ if datatypes_count:
+ return int( datatypes_count.group( 1 ) )
+ return None
def get_filename( self, filename, filepath=None ):
if filepath is not None:
return os.path.abspath( os.path.join( filepath, filename ) )
@@ -404,15 +409,16 @@
# group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
# admin_toolshed controller.
install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
- iri_ids = install_parameters.group(1)
- encoded_kwd = install_parameters.group(2)
- reinstalling = install_parameters.group(3)
- url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
- ( iri_ids, encoded_kwd, reinstalling )
- self.visit_galaxy_url( url )
- def install_repository( self, name, owner, category_name, install_tool_dependencies=False, changeset_revision=None, strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[], **kwd ):
- if test_db_util.get_installed_repository_by_name_owner( name, owner ) is not None:
- return
+ if install_parameters:
+ iri_ids = install_parameters.group(1)
+ encoded_kwd = install_parameters.group(2)
+ reinstalling = install_parameters.group(3)
+ url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+ ( iri_ids, encoded_kwd, reinstalling )
+ self.visit_galaxy_url( url )
+ def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
+ changeset_revision=None, strings_displayed=[], strings_not_displayed=[],
+ preview_strings_displayed=[], post_submit_strings_displayed=[], **kwd ):
self.browse_tool_shed( url=self.url )
self.browse_category( test_db_util.get_category_by_name( category_name ) )
self.preview_repository_in_tool_shed( name, common.test_user_1_name, strings_displayed=preview_strings_displayed )
@@ -435,6 +441,7 @@
if 'shed_tool_conf' not in kwd:
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+ self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
self.initiate_installation_process()
self.wait_for_repository_installation( repository, changeset_revision )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
@@ -460,6 +467,11 @@
self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), changeset_revision ) )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def reactivate_repository( self, installed_repository ):
+ url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_repository.id )
+ self.visit_galaxy_url( url )
+ strings_displayed = [ installed_repository.name, 'repository has been activated' ]
+ self.check_for_strings( strings_displayed, [] )
def reinstall_repository( self, installed_repository ):
url = '/admin_toolshed/reinstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
@@ -513,13 +525,16 @@
def uninstall_repository( self, installed_repository, remove_from_disk=True ):
url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
-# form = tc.browser.get_form( 'deactivate_or_uninstall_repository' )
- tc.fv ( 1, "remove_from_disk", '1' )
-# checkbox.readonly = False
-# if remove_from_disk:
-# checkbox.selected = True
+ if remove_from_disk:
+ tc.fv ( 1, "remove_from_disk", 'true' )
+ else:
+ tc.fv ( 1, "remove_from_disk", 'false' )
tc.submit( 'deactivate_or_uninstall_repository_button' )
- strings_displayed = [ 'has been uninstalled', 'The repository named' ]
+ strings_displayed = [ 'The repository named' ]
+ if remove_from_disk:
+ strings_displayed.append( 'has been uninstalled' )
+ else:
+ strings_displayed.append( 'has been deactivated' )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
def update_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/check_for_updates?id=%s' % self.security.encode_id( installed_repository.id )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -26,7 +26,7 @@
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( name='test_0040_repository_circular_dependencies', description='Testing handling of circular repository dependencies.' )
- def test_0010_create_freebayes_repository_name( self ):
+ def test_0010_create_freebayes_repository( self ):
'''Create and populate freebayes_0040.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -65,6 +65,7 @@
self.preview_repository_in_tool_shed( 'emboss_0020', common.test_user_1_name, strings_displayed=[ 'emboss_0020', 'Valid tools' ] )
def test_0015_install_emboss_repository( self ):
'''Install the emboss repository without installing tool dependencies.'''
+ old_datatypes = self.get_datatypes_count()
self.install_repository( 'emboss_0020',
common.test_user_1_name,
'Test 0020 Basic Repository Dependencies',
@@ -81,6 +82,8 @@
strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
+ new_datatypes = self.get_datatypes_count()
+ assert new_datatypes > old_datatypes, 'Installing emboss did not add datatypes to the registry'
def test_0020_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
self.verify_installed_repository_metadata_unchanged( 'emboss_0020', common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -102,11 +102,10 @@
def test_0015_install_emboss_repository( self ):
'''Install the emboss repository without installing tool dependencies.'''
repository = test_db_util.get_repository_by_name_and_owner( 'emboss_0030', common.test_user_1_name )
- revisions = self.get_repository_metadata_revisions( repository )
+ old_datatypes = self.get_datatypes_count()
self.install_repository( 'emboss_0030',
common.test_user_1_name,
'Test 0030 Repository Dependency Revisions',
- changeset_revision=revisions[1],
install_tool_dependencies=False,
new_tool_panel_section='test_1030' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
@@ -121,6 +120,8 @@
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
+ new_datatypes = self.get_datatypes_count()
+ assert new_datatypes > old_datatypes, 'Installing emboss did not add datatypes to the registry.'
def test_0025_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
self.verify_installed_repository_metadata_unchanged( 'emboss_0030', common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
--- a/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
+++ b/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
@@ -1,8 +1,8 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
-class BasicToolShedFeatures( ShedTwillTestCase ):
- '''Test installing a basic repository.'''
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a basic repository.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
@@ -54,7 +54,6 @@
def test_0015_uninstall_filtering_repository( self ):
'''Uninstall the filtering repository.'''
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- old_metadata = installed_repository.metadata
self.uninstall_repository( installed_repository, remove_from_disk=True )
strings_not_displayed = [ installed_repository.name,
installed_repository.description,
@@ -84,7 +83,7 @@
def test_0030_reactivate_filtering_repository( self ):
'''Reactivate the filtering repository and verify that it now shows up in the list of installed repositories.'''
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- self.reinstall_repository( installed_repository )
+ self.reactivate_repository( installed_repository )
strings_displayed = [ installed_repository.name,
installed_repository.description,
installed_repository.owner,
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -0,0 +1,110 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with tool dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0010 category and upload the freebayes repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name='freebayes_0010',
+ description="Galaxy's freebayes tool",
+ long_description="Long description of Galaxy's freebayes tool",
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ) )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'freebayes/freebayes.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded freebayes.xml." )
+ self.upload_file( repository,
+ 'freebayes/tool_data_table_conf.xml.sample',
+ valid_tools_only=False,
+ commit_message="Uploaded tool_data_table_conf.xml.",
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/sam_fa_indices.loc.sample',
+ commit_message="Uploaded sam_fa_indices.loc.sample.",
+ valid_tools_only=False,
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/invalid_tool_dependencies/tool_dependencies.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded invalid_tool_dependencies/tool_dependencies.xml.",
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/tool_dependencies.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded tool_dependencies.xml",
+ remove_repo_files_not_in_tar='No' )
+ def test_0010_install_freebayes_repository( self ):
+ '''Install the freebayes repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( 'freebayes_0010',
+ common.test_user_1_name,
+ 'Test 0010 Repository With Tool Dependencies',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_freebayes_repository( self ):
+ '''Uninstall the freebayes repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0020_reinstall_freebayes_repository( self ):
+ '''Reinstall the freebayes repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0025_deactivate_freebayes_repository( self ):
+ '''Deactivate the freebayes repository without removing it from disk.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0030_reactivate_freebayes_repository( self ):
+ '''Reactivate the freebayes repository and verify that it now shows up in the list of installed repositories.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
@@ -0,0 +1,123 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0020'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0020 category and upload the emboss repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( datatypes_repository ):
+ self.upload_file( datatypes_repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1020', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ def test_0010_install_emboss_repository( self ):
+ '''Install the emboss repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ 'Test 0020 Basic Repository Dependencies',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_emboss_repository( self ):
+ '''Uninstall the emboss repository.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes < old_datatypes, 'Uninstalling emboss did not remove datatypes from the registry.'
+ def test_0020_reinstall_emboss_repository( self ):
+ '''Reinstall the emboss repository.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes > old_datatypes, 'Reinstalling emboss did not add datatypes to the registry.'
+ def test_0025_deactivate_emboss_repository( self ):
+ '''Deactivate the emboss repository without removing it from disk.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.name,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes < old_datatypes, 'Deactivating emboss did not remove datatypes from the registry.'
+ def test_0030_reactivate_emboss_repository( self ):
+ '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes > old_datatypes, 'Reactivating emboss did not add datatypes to the registry.'
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
@@ -0,0 +1,146 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0030'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0030'
+emboss_5_repository_name = 'emboss_5_0030'
+emboss_6_repository_name = 'emboss_6_0030'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with repository dependency revisions.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0030 category and upload the emboss repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( datatypes_repository ):
+ self.upload_file( datatypes_repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_5_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ emboss_6_repository = self.get_or_create_repository( name=emboss_6_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_6_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '6' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_6_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ emboss_5_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ self.generate_repository_dependency_xml( [ emboss_6_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ def test_0010_install_emboss_repository( self ):
+ '''Install the emboss repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ 'Test 0030 Repository Dependency Revisions',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_emboss_repository( self ):
+ '''Uninstall the emboss repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0020_reinstall_emboss_repository( self ):
+ '''Reinstall the emboss repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0025_deactivate_emboss_repository( self ):
+ '''Deactivate the emboss repository without removing it from disk.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0030_reactivate_emboss_repository( self ):
+ '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -270,7 +270,7 @@
file_path = galaxy_file_path,
tool_path = tool_path,
tool_data_path = tool_data_path,
- shed_tool_path=galaxy_shed_tool_path,
+ shed_tool_path = galaxy_shed_tool_path,
update_integrated_tool_panel = False,
tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
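The datatypes checks added to the emboss install test above (and temporarily commented out in the new repository dependency tests) follow a simple before/after counting pattern. A minimal standalone sketch of that pattern follows; the counting and install callables are hypothetical stand-ins for the ShedTwillTestCase helpers ( get_datatypes_count, install_repository, uninstall_repository ) exercised in the diffs, not the actual framework API.

# Sketch (illustrative only) of the before/after datatypes-registry count check
# used by the updated emboss install test. The callables passed in are
# hypothetical stand-ins for the twill test case helpers referenced above.

def assert_registry_change( count_datatypes, action, expect_growth, description ):
    # Record the registry size, perform the install/uninstall action, then
    # verify the registry grew or shrank as expected.
    old_count = count_datatypes()
    action()
    new_count = count_datatypes()
    if expect_growth:
        assert new_count > old_count, '%s did not add datatypes to the registry.' % description
    else:
        assert new_count < old_count, '%s did not remove datatypes from the registry.' % description

if __name__ == '__main__':
    registry = set()
    assert_registry_change( lambda: len( registry ),
                            lambda: registry.update( [ 'acedb', 'embl' ] ),
                            True,
                            'Installing emboss' )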
commit/galaxy-central: greg: Fixes and enhancements for rendering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
by Bitbucket 21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b268bc0bbc63/
changeset: b268bc0bbc63
user: greg
date: 2012-12-21 18:35:28
summary: Fixes and enhancements for rendering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
affected #: 6 files
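A brief orientation on the main change before the diff: when rendering repository dependencies in Galaxy, each [ tool_shed, name, owner, changeset_revision ] tuple is now extended with the matching installed repository's id and installation status so the template can link to the installed repository and display a Status column. The standalone sketch below illustrates that idea only; the lookup callable and repository object are simplified, hypothetical stand-ins, not the actual Galaxy model or API.

# Illustrative sketch of extending repository dependency tuples with installation
# information before display. find_installed_repository is a hypothetical lookup
# standing in for the database query used in shed_util.py below.

def augment_rd_tups( rd_tups, find_installed_repository ):
    # Append ( id, status ) to each [ tool_shed, name, owner, changeset_revision ] entry,
    # or ( None, None ) when the required repository is not installed.
    for index, rd_tup in enumerate( rd_tups ):
        tool_shed, name, owner, changeset_revision = rd_tup
        required_repository = find_installed_repository( tool_shed, name, owner, changeset_revision )
        if required_repository:
            rd_tup = rd_tup + [ required_repository.id, str( required_repository.status ) ]
        else:
            rd_tup = rd_tup + [ None, None ]
        rd_tups[ index ] = rd_tup
    return rd_tups

if __name__ == '__main__':
    class FakeInstalledRepository( object ):
        id = 42
        status = 'Installed'
    rd_tups = [ [ 'toolshed.g2.bx.psu.edu', 'emboss_datatypes_0030', 'user1', 'abc123def456' ] ]
    print( augment_rd_tups( rd_tups, lambda *args: FakeInstalledRepository() ) )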
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -853,7 +853,7 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository ):
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
from Galaxy and not the tool shed.
@@ -863,25 +863,64 @@
datatypes = metadata.get( 'datatypes', None )
invalid_tools = metadata.get( 'invalid_tools', None )
if repository.has_readme_files:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+ if reinstalling:
+ # Since we're reinstalling, we need to send a request to the tool shed to get the README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = from_json_string( raw_text )
+ else:
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
repository_dependencies = metadata.get( 'repository_dependencies', None )
repository_dependencies_dict_for_display = {}
if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool shed repository metadata.
root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
repository.name,
repository.owner,
repository.installed_changeset_revision )
rd_tups_for_display = []
rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ for index, rd_tup in enumerate( rd_tups ):
+ # Get the id and the installation status of the required repository.
+ tool_shed, name, owner, changeset_revision = rd_tup
+ required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
+ # TODO: Since the changeset revision defined in the tool shed repository's repository_dependencies.xml file may have a changeset_revision
+ # value that is outdated, we need to make a call to the tool shed to get the updated changeset revision if the repository is still None here.
+ if required_repository:
+ rd_tup.append( required_repository.id )
+ rd_tup.append( str( required_repository.status ) )
+ else:
+ # See above TODO. For now, we'll take a short cut and attempt to find the repository by name and owner only. This will not work long
+ # term because multiple revisions of a repository with the same name and owner could be installed into a Galaxy instance. The long term
+ # fix is to call get_update_to_changeset_revision_and_ctx_rev( trans, repository ) for each required repository.
+ required_repository = trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
+ trans.model.ToolShedRepository.table.c.owner == owner ) ) \
+ .first()
+ if required_repository:
+ rd_tup.append( required_repository.id )
+ rd_tup.append( str( required_repository.status ) )
+ else:
+ rd_tup.append( None )
+ rd_tup.append( None )
+ rd_tups[ index ] = rd_tup
repository_dependencies_dict_for_display[ 'root_key' ] = root_key
repository_dependencies_dict_for_display[ root_key ] = rd_tups
repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ if reinstalling:
+ # All tool dependencies will be considered missing since we are reinstalling the repository.
+ if tool_dependencies:
+ for td in tool_dependencies:
+ missing_tool_dependencies.append( td )
+ tool_dependencies = None
valid_tools = metadata.get( 'tools', None )
workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -106,11 +106,12 @@
folder_id = 0
# Datatypes container.
if datatypes:
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if invalid_tools:
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
+ folder_id,
invalid_tools,
changeset_revision,
repository=repository,
@@ -118,11 +119,12 @@
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
if readme_files_dict:
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
if repository_dependencies:
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
repository_name=repository_name,
repository_owner=repository_owner,
changeset_revision=changeset_revision,
@@ -132,44 +134,40 @@
# Tool dependencies container.
if tool_dependencies:
# We only want to display the Status column if the tool_dependency is missing.
- description = 'click the name to browse the dependency installation directory'
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
tool_dependencies,
label='Installed tool dependencies',
- for_galaxy=True,
- description=description,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
- description = 'click the name to install the missing dependency'
# We only want to display the Status column if the tool_dependency is missing.
- folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
missing_tool_dependencies,
label='Missing tool dependencies',
- for_galaxy=True,
- description=description,
display_status=True )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
+ folder_id,
valid_tools,
repository,
changeset_revision,
- label='Valid tools',
- description='click the name to inspect the tool metadata' )
+ label='Valid tools' )
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Workflows container.
if workflows:
- folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
finally:
lock.release()
return containers_dict
-def build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ):
+def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
"""Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -187,12 +185,13 @@
# Datatypes container.
if metadata and 'datatypes' in metadata:
datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if metadata and 'invalid_tools' in metadata:
invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
+ folder_id,
invalid_tool_configs,
changeset_revision,
repository=repository,
@@ -201,11 +200,12 @@
# Readme files container.
if metadata and 'readme_files' in metadata:
readme_files_dict = build_readme_files_dict( metadata )
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
repository_name=repository.name,
repository_owner=repository.user.username,
changeset_revision=changeset_revision,
@@ -216,16 +216,16 @@
# Tool dependencies container.
if metadata and 'tool_dependencies' in metadata:
tool_dependencies = metadata[ 'tool_dependencies' ]
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
tool_dependencies,
- for_galaxy=False,
- description=None,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
+ folder_id,
valid_tools,
repository,
changeset_revision,
@@ -234,7 +234,7 @@
# Workflows container.
if metadata and 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1850,7 +1850,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
cntrller=cntrller,
repo_name=repo_name,
@@ -1953,7 +1953,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
containers_dict=containers_dict,
@@ -2481,7 +2481,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
cntrller=cntrller,
repo=repo,
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -71,13 +71,14 @@
class RepositoryDependency( object ):
"""Repository dependency object"""
- def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, installation_status=None ):
+ def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, installation_status=None, tool_shed_repository_id=None ):
self.id = id
self.toolshed = toolshed
self.repository_name = repository_name
self.repository_owner = repository_owner
self.changeset_revision = changeset_revision
self.installation_status = installation_status
+ self.tool_shed_repository_id = tool_shed_repository_id
@property
def listify( self ):
return [ self.toolshed, self.repository_name, self.repository_owner, self.changeset_revision ]
@@ -120,7 +121,7 @@
self.format_version = format_version
self.annotation = annotation
-def build_datatypes_folder( folder_id, datatypes, label='Datatypes', description=None ):
+def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
"""Return a folder hierarchy containing datatypes."""
if datatypes:
datatype_id = 0
@@ -128,8 +129,6 @@
datatypes_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='datatypes', label=label, parent=datatypes_root_folder )
- if description:
- folder.description = description
datatypes_root_folder.folders.append( folder )
# Insert a header row.
datatype_id += 1
@@ -150,7 +149,7 @@
else:
datatypes_root_folder = None
return folder_id, datatypes_root_folder
-def build_invalid_tools_folder( folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools', description=None ):
+def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
"""Return a folder hierarchy containing invalid tools."""
# TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
if invalid_tool_configs:
@@ -159,8 +158,6 @@
invalid_tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='invalid_tools', label=label, parent=invalid_tools_root_folder )
- if description:
- folder.description = description
invalid_tools_root_folder.folders.append( folder )
for invalid_tool_config in invalid_tool_configs:
invalid_tool_id += 1
@@ -176,7 +173,7 @@
else:
invalid_tools_root_folder = None
return folder_id, invalid_tools_root_folder
-def build_readme_files_folder( folder_id, readme_files_dict, label='Readme files', description=None ):
+def build_readme_files_folder( trans, folder_id, readme_files_dict, label='Readme files' ):
"""Return a folder hierarchy containing readme text files."""
if readme_files_dict:
multiple_readme_files = len( readme_files_dict ) > 1
@@ -186,8 +183,6 @@
if multiple_readme_files:
folder_id += 1
readme_files_folder = Folder( id=folder_id, key='readme_files', label=label, parent=readme_files_root_folder )
- if description:
- readme_files_folder.description = description
readme_files_root_folder.folders.append( readme_files_folder )
for readme_file_name, readme_file_text in readme_files_dict.items():
readme_id += 1
@@ -205,8 +200,8 @@
else:
readme_files_root_folder = None
return folder_id, readme_files_root_folder
-def build_repository_dependencies_folder( toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
- label='Repository dependencies', description=None ):
+def build_repository_dependencies_folder( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
+ label='Repository dependencies' ):
"""Return a folder hierarchy containing repository dependencies."""
if repository_dependencies:
repository_dependency_id = 0
@@ -217,8 +212,6 @@
# Create the Repository dependencies folder and add it to the root folder.
repository_dependencies_folder_key = repository_dependencies[ 'root_key' ]
repository_dependencies_folder = Folder( id=folder_id, key=repository_dependencies_folder_key, label=label, parent=repository_dependencies_root_folder )
- if description:
- repository_dependencies_folder.description = description
del repository_dependencies[ 'root_key' ]
# The received repository_dependencies is a dictionary with keys: 'root_key', 'description', and one or more repository_dependency keys.
# We want the description value associated with the repository_dependencies_folder.
@@ -226,12 +219,12 @@
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
repository_dependencies_folder, folder_id, repository_dependency_id = \
- populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
+ populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
repository_dependencies_folder = prune_repository_dependencies( repository_dependencies_folder )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
-def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools', description=None ):
+def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
"""Return a folder hierarchy containing valid tools."""
if tool_dicts:
tool_id = 0
@@ -239,8 +232,8 @@
tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder )
- if description:
- folder.description = description
+ if trans.webapp.name == 'galaxy':
+ folder.description = 'click the name to inspect the tool metadata'
tools_root_folder.folders.append( folder )
# Insert a header row.
tool_id += 1
@@ -281,7 +274,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( folder_id, tool_dependencies, label='Tool dependencies', for_galaxy=False, description=None, display_status=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', display_status=False ):
"""Return a folder hierarchy containing tool dependencies."""
if tool_dependencies:
tool_dependency_id = 0
@@ -289,12 +282,16 @@
tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
- if description:
- folder.description = description
+ if trans.webapp.name == 'galaxy':
+ if display_status:
+ # The status will be displayed only if the tool dependency status is not 'Installed'.
+ folder.description = 'click the name to install the missing dependency'
+ else:
+ folder.description = 'click the name to browse the dependency installation directory'
tool_dependencies_root_folder.folders.append( folder )
# Insert a header row.
tool_dependency_id += 1
- if for_galaxy:
+ if trans.webapp.name == 'galaxy':
# Include the installation directory.
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
@@ -354,7 +351,7 @@
else:
tool_dependencies_root_folder = None
return folder_id, tool_dependencies_root_folder
-def build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows', description=None ):
+def build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' ):
"""Return a folder hierarchy containing invalid tools."""
if workflows:
workflow_id = 0
@@ -362,8 +359,6 @@
workflows_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='workflows', label=label, parent=workflows_root_folder )
- if description:
- folder.description = description
workflows_root_folder.folders.append( folder )
# Insert a header row.
workflow_id += 1
@@ -436,7 +431,7 @@
repository_owner = items[ 2 ]
changeset_revision = items[ 3 ]
return toolshed_base_url, repository_name, repository_owner, changeset_revision
-def handle_repository_dependencies_container_entry( repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
+def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
folder = get_folder( repository_dependencies_folder, rd_key )
label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, repository_dependencies_folder.key )
@@ -451,7 +446,25 @@
folder_id += 1
sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
repository_dependencies_folder.folders.append( sub_folder )
+ if trans.webapp.name == 'galaxy':
+ # Insert a header row.
+ repository_dependency_id += 1
+ repository_dependency = RepositoryDependency( id=repository_dependency_id,
+ repository_name='Name',
+ changeset_revision='Revision',
+ repository_owner='Owner',
+ installation_status='Installation status' )
+ # Insert the header row into the folder.
+ sub_folder.repository_dependencies.append( repository_dependency )
for repository_dependency in rd_value:
+ if trans.webapp.name == 'galaxy':
+ # We have two extra items in the tuple, repository.id and repository.status.
+ tool_shed_repository_id = repository_dependency[ 4 ]
+ installation_status = repository_dependency[ 5 ]
+ repository_dependency = repository_dependency[ 0:4 ]
+ else:
+ tool_shed_repository_id = None
+ installation_status = None
can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
if can_create_dependency:
toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
@@ -460,7 +473,9 @@
toolshed=toolshed,
repository_name=repository_name,
repository_owner=repository_owner,
- changeset_revision=changeset_revision )
+ changeset_revision=changeset_revision,
+ installation_status=installation_status,
+ tool_shed_repository_id=tool_shed_repository_id )
# Insert the repository_dependency into the folder.
sub_folder.repository_dependencies.append( repository_dependency )
return repository_dependencies_folder, folder_id, repository_dependency_id
@@ -474,11 +489,11 @@
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
-def populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
+def populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
folder_keys = repository_dependencies.keys()
for key, value in repository_dependencies.items():
repository_dependencies_folder, folder_id, repository_dependency_id = \
- handle_repository_dependencies_container_entry( repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
+ handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
return repository_dependencies_folder, folder_id, repository_dependency_id
def print_folders( pad, folder ):
# For debugging...
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -877,7 +877,7 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -1409,6 +1409,7 @@
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
+ tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
@@ -1457,66 +1458,7 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- if metadata:
- datatypes = metadata.get( 'datatypes', None )
- invalid_tools = metadata.get( 'invalid_tools', None )
- if tool_shed_repository.has_readme_files:
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = from_json_string( raw_text )
- else:
- readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
- repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
- root_key = container_util.generate_repository_dependencies_key_for_repository( tool_shed_repository.tool_shed,
- tool_shed_repository.name,
- tool_shed_repository.owner,
- tool_shed_repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans,
- tool_shed_repository,
- all_tool_dependencies )
- valid_tools = metadata.get( 'tools', None )
- workflows = metadata.get( 'workflows', None )
- # All tool dependencies will be considered missing since we are reinstalling the repository.
- if tool_dependencies:
- for td in tool_dependencies:
- missing_tool_dependencies.append( td )
- tool_dependencies = None
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- repository=tool_shed_repository,
- datatypes=datatypes,
- invalid_tools=invalid_tools,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=missing_tool_dependencies,
- valid_tools=valid_tools,
- workflows=workflows )
- else:
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- readme_files_dict=None,
- repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, tool_shed_repository, reinstalling=True )
# Handle repository dependencies check box.
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
@@ -1656,7 +1598,7 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
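Both call sites above now delegate to the shared shed_util helper instead of rebuilding the containers dictionary inline; the only difference between them is the new reinstalling flag. A minimal sketch of the call pattern, assuming the signature shown in this changeset:

# Illustrative only -- mirrors the two call sites in this changeset.
# reinstalling=True causes every tool dependency to be treated as missing so
# the reinstall form can offer to install all of them again.
containers_dict = shed_util.populate_containers_dict_from_repository_metadata(
    trans,
    tool_shed_url,
    tool_path,
    tool_shed_repository,
    reinstalling=True )  # reinstalling=False on the manage_repository page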
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -214,6 +214,8 @@
folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
else:
folder_label = "%s<i> - this repository requires installation of these additional repositories</i>" % folder_label
+ if trans.webapp.name == 'galaxy':
+ col_span_str = 'colspan="4"'
elif folder.label == 'Valid tools':
col_span_str = 'colspan="3"'
if folder.description:
@@ -252,8 +254,9 @@
%for readme in folder.readme_files:
${render_readme( readme, pad, my_row, row_counter )}
%endfor
- %for repository_dependency in folder.repository_dependencies:
- ${render_repository_dependency( repository_dependency, pad, my_row, row_counter )}
+ %for index, repository_dependency in enumerate( folder.repository_dependencies ):
+ <% row_is_header = index == 0 %>
+ ${render_repository_dependency( repository_dependency, pad, my_row, row_counter, row_is_header )}
%endfor
%for index, tool_dependency in enumerate( folder.tool_dependencies ):
<% row_is_header = index == 0 %>
@@ -349,21 +352,60 @@
%></%def>
-<%def name="render_repository_dependency( repository_dependency, pad, parent, row_counter )">
+<%def name="render_repository_dependency( repository_dependency, pad, parent, row_counter, row_is_header=False )"><%
encoded_id = trans.security.encode_id( repository_dependency.id )
+ if trans.webapp.name == 'galaxy':
+ if repository_dependency.tool_shed_repository_id:
+ encoded_required_repository_id = trans.security.encode_id( repository_dependency.tool_shed_repository_id )
+ else:
+ encoded_required_repository_id = None
+ if repository_dependency.installation_status:
+ installation_status = str( repository_dependency.installation_status )
+ else:
+ installation_status = None
repository_name = str( repository_dependency.repository_name )
changeset_revision = str( repository_dependency.changeset_revision )
repository_owner = str( repository_dependency.repository_owner )
+
+ if trans.webapp.name == 'galaxy':
+ if row_is_header:
+ cell_type = 'th'
+ else:
+ cell_type = 'td'
+ else:
+ cell_type = 'td'
%><tr class="datasetRow"
%if parent is not None:
parent="${parent}"
%endif
id="libraryItem-${encoded_id}">
- ##<td style="padding-left: ${pad+20}px;">${repository_dependency.toolshed | h}</td>
- <td style="padding-left: ${pad+20}px;">Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b></td>
+ %if trans.webapp.name == 'galaxy':
+ <${cell_type} style="padding-left: ${pad+20}px;">
+ %if row_is_header:
+ ${repository_name | h}
+ %elif encoded_required_repository_id:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=encoded_required_repository_id )}">${repository_name | h}</a>
+ %else:
+ ${repository_name | h}
+ %endif
+ </${cell_type}>
+ <${cell_type}>
+ ${changeset_revision | h}
+ </${cell_type}>
+ <${cell_type}>
+ ${repository_owner | h}
+ </${cell_type}>
+ <${cell_type}>
+ ${installation_status}
+ </${cell_type}>
+ %else:
+ <td style="padding-left: ${pad+20}px;">
+ Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b>
+ </td>
+ %endif
</tr><%
my_row = row_counter.count
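The template change above amounts to a per-row decision: in Galaxy the repository dependencies are laid out in columns with the first row acting as a header, while the tool shed keeps its single descriptive cell. A rough Python sketch of that decision, purely illustrative since the real logic lives in the Mako template:

def dependency_cell_type( webapp_name, row_index ):
    # In Galaxy, repository dependency rows carry four columns (name, revision,
    # owner, installation status) and the first row is rendered with <th> cells
    # as a header; the tool shed keeps a single descriptive <td> per row.
    if webapp_name == 'galaxy' and row_index == 0:
        return 'th'
    return 'td'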
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8990b28bb0e7/
changeset: 8990b28bb0e7
user: inithello
date: 2012-12-21 16:32:45
summary: Fix handling of repository tools and tool dependencies. Fix issue with functional tests altering the integrated tool panel.
affected #: 2 files
diff -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -75,8 +75,9 @@
# Replace the old list of in-memory config_elems with the new list for this shed_tool_conf_dict.
shed_tool_conf_dict[ 'config_elems' ] = config_elems
app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
- # Write the current in-memory version of the integrated_tool_panel.xml file to disk.
- app.toolbox.write_integrated_tool_panel_config_file()
+ if app.config.update_integrated_tool_panel:
+ # Write the current in-memory version of the integrated_tool_panel.xml file to disk.
+ app.toolbox.write_integrated_tool_panel_config_file()
app.toolbox_search = ToolBoxSearch( app.toolbox )
def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
"""
@@ -1055,7 +1056,7 @@
shed_tool_conf_dict[ 'config_elems' ] = config_elems
trans.app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox )
- if uninstall:
+ if uninstall and trans.app.config.update_integrated_tool_panel:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
def remove_tool_dependency( trans, tool_dependency ):
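Both hunks above apply the same guard: the in-memory integrated tool panel is only flushed to disk when the instance is configured to update it, which keeps functional test runs from rewriting the real integrated_tool_panel.xml. A minimal sketch of the guard, assuming update_integrated_tool_panel is already exposed on app.config:

# Persist the in-memory tool panel only when this Galaxy instance is
# configured to maintain integrated_tool_panel.xml on disk; test instances
# leave update_integrated_tool_panel disabled.
if app.config.update_integrated_tool_panel:
    app.toolbox.write_integrated_tool_panel_config_file()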
diff -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1088,14 +1088,16 @@
if ( not includes_tools and not includes_repository_dependencies ) or \
( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
- if includes_tool_dependencies:
- install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
+ if includes_tools:
shed_tool_conf = kwd[ 'shed_tool_conf' ]
else:
- install_tool_dependencies = False
# If installing a repository that includes no tools, get the relative tool_path from the file to which the migrated_tools_config
# setting points.
shed_tool_conf = trans.app.config.migrated_tools_config
+ if includes_tool_dependencies:
+ install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
+ else:
+ install_tool_dependencies = False
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts, message = \
shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False )
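The reordering in this hunk separates two decisions that were previously tangled: which shed tool config to write to, and whether to install tool dependencies. A condensed sketch of the corrected flow, using the same names as the controller:

# shed_tool_conf depends only on whether the repository ships tools.
if includes_tools:
    shed_tool_conf = kwd[ 'shed_tool_conf' ]
else:
    # Repositories without tools fall back to the migrated tools config.
    shed_tool_conf = trans.app.config.migrated_tools_config
# Tool dependency installation is decided independently of the above.
if includes_tool_dependencies:
    install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
else:
    install_tool_dependencies = False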
https://bitbucket.org/galaxy/galaxy-central/changeset/5e60f799a868/
changeset: 5e60f799a868
user: inithello
date: 2012-12-21 16:33:18
summary: Make functional tests explicitly specify which shed tool config to use.
affected #: 2 files
diff -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 -r 5e60f799a8687d92fb41dd8a764d434623837772 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -28,6 +28,7 @@
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
+ self.shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
# TODO: Figure out a way to alter these attributes during tests.
self.galaxy_tool_dependency_dir = None # os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
@@ -431,6 +432,8 @@
checkbox.selected = True
else:
checkbox.selected = False
+ if 'shed_tool_conf' not in kwd:
+ kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.initiate_installation_process()
self.wait_for_repository_installation( repository, changeset_revision )
diff -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 -r 5e60f799a8687d92fb41dd8a764d434623837772 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -256,6 +256,7 @@
shed_tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
shed_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
file( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+ os.environ[ 'GALAXY_TEST_SHED_TOOL_CONF' ] = galaxy_shed_tool_conf_file
# ---- Build Galaxy Application --------------------------------------------------
galaxy_global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
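Together, the two test changes in this changeset pass the generated shed tool config to the test cases through the environment rather than relying on whatever config the Galaxy instance happens to load. A minimal sketch of that handshake, assuming the same environment variable name on both sides:

import os

# functional_tests.py side: after writing the generated shed_tool_conf file,
# publish its path for the test cases (galaxy_shed_tool_conf_file is the path
# written just above in the diff).
os.environ[ 'GALAXY_TEST_SHED_TOOL_CONF' ] = galaxy_shed_tool_conf_file

# twilltestcase.py side: read it back during setup and inject it into the form
# submission whenever a test did not name a shed tool config explicitly.
shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
if 'shed_tool_conf' not in kwd:
    kwd[ 'shed_tool_conf' ] = shed_tool_conf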
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for setting tool versions for tools contained in tool shed repositories installed into a Galaxy instance.
by Bitbucket 21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5c9971aba1b4/
changeset: 5c9971aba1b4
user: greg
date: 2012-12-21 15:00:34
summary: Fix for setting tool versions for tools contained in tool shed repositories installed into a Galaxy instance.
affected #: 2 files
diff -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -852,6 +852,60 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository ):
+ """
+ Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
+ from Galaxy and not the tool shed.
+ """
+ metadata = repository.metadata
+ if metadata:
+ datatypes = metadata.get( 'datatypes', None )
+ invalid_tools = metadata.get( 'invalid_tools', None )
+ if repository.has_readme_files:
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+ else:
+ readme_files_dict = None
+ repository_dependencies = metadata.get( 'repository_dependencies', None )
+ repository_dependencies_dict_for_display = {}
+ if repository_dependencies:
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
+ # shed repository metadata.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ rd_tups_for_display = []
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ repository_dependencies_dict_for_display[ 'root_key' ] = root_key
+ repository_dependencies_dict_for_display[ root_key ] = rd_tups
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ all_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ valid_tools = metadata.get( 'tools', None )
+ workflows = metadata.get( 'workflows', None )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ repository=repository,
+ datatypes=datatypes,
+ invalid_tools=invalid_tools,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies_dict_for_display,
+ tool_dependencies=tool_dependencies,
+ valid_tools=valid_tools,
+ workflows=workflows )
+ else:
+ containers_dict = dict( datatypes=None,
+ invalid_tools=None,
+ readme_files_dict=None,
+ repository_dependencies=None,
+ tool_dependencies=None,
+ valid_tools=None,
+ workflows=None )
+ return containers_dict
def pull_repository( repo, repository_clone_url, ctx_rev ):
"""Pull changes from a remote repository to a local one."""
commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
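One detail worth calling out in the new helper: installed repositories do not carry the root_key that the container-building code expects, so it is synthesized from the repository's coordinates before the dependencies are handed off for display. A short sketch of that assembly, condensed from the function above and illustrative only:

# Illustrative only -- condensed from populate_containers_dict_from_repository_metadata.
root_key = container_util.generate_repository_dependencies_key_for_repository(
    repository.tool_shed,
    repository.name,
    repository.owner,
    repository.installed_changeset_revision )
repository_dependencies_dict_for_display = {
    'root_key' : root_key,
    root_key : repository_dependencies[ 'repository_dependencies' ],
    'description' : repository_dependencies[ 'description' ],
}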
diff -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -877,45 +877,7 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
- metadata = repository.metadata
- datatypes = metadata.get( 'datatypes', None )
- invalid_tools = metadata.get( 'invalid_tools', None )
- if repository.has_readme_files:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
- else:
- readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
- repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
- valid_tools = metadata.get( 'tools', None )
- workflows = metadata.get( 'workflows', None )
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=repository.name,
- repository_owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- repository=repository,
- datatypes=datatypes,
- invalid_tools=invalid_tools,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies_dict_for_display,
- tool_dependencies=tool_dependencies,
- valid_tools=valid_tools,
- workflows=workflows )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -1668,7 +1630,10 @@
@web.expose
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
- # Get the tool_versions from the tool shed for each tool in the installed change set.
+ """
+ Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed repository and update the
+ metadata for the repository's revision in the Galaxy database.
+ """
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
@@ -1689,10 +1654,12 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
repo_files_dir=repo_files_dir,
+ containers_dict=containers_dict,
message=message,
status=status )
@web.json
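The controller methods in this file fetch their data from the tool shed as JSON over plain HTTP; the exact endpoint used by set_tool_versions is truncated in this excerpt, so the sketch below reuses the get_readme_files request shape that appears earlier in this digest (illustrative only):

import urllib2
from galaxy.util.json import from_json_string

# Hedged sketch of the Galaxy-to-tool-shed round trip used by these methods.
url = suc.url_join( tool_shed_url,
                    'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' %
                    ( repository.name, repository.owner, repository.installed_changeset_revision ) )
response = urllib2.urlopen( url )
try:
    raw_text = response.read()
finally:
    response.close()
readme_files_dict = from_json_string( raw_text )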
Repository URL: https://bitbucket.org/galaxy/galaxy-central/