galaxy-commits
March 2013
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8070d6f638fe/
changeset: 8070d6f638fe
user: jgoecks
date: 2013-03-17 15:58:43
summary: Import and documentation fixes for web helpers.
affected #: 1 file
diff -r 1aa6224b32c2e1c05eafbf1f507d2f7a754ded89 -r 8070d6f638fea37aff3ac3e081b7bd9fba0402b7 lib/galaxy/web/framework/helpers/__init__.py
--- a/lib/galaxy/web/framework/helpers/__init__.py
+++ b/lib/galaxy/web/framework/helpers/__init__.py
@@ -1,7 +1,7 @@
import pkg_resources
pkg_resources.require( "WebHelpers" )
-from webhelpers import *
+from webhelpers import date, stylesheet_link_tag, javascript_include_tag, url_for
from galaxy.util.json import to_json_string
from galaxy.util import hash_util
@@ -12,10 +12,13 @@
server_starttime = int(time.time())
-# If the date is more than one week ago, then display the actual date instead of in words
def time_ago( x ):
+ """
+ Convert a datetime to a string.
+ """
delta = timedelta(weeks=1)
+ # If the date is more than one week ago, then display the actual date instead of in words
if (datetime.utcnow() - x) > delta: # Greater than a week difference
return x.strftime("%b %d, %Y")
else:
@@ -27,8 +30,10 @@
else:
return c
-# Smart string truncation
def truncate(content, length=100, suffix='...'):
+ """
+ Smart string truncation
+ """
if len(content) <= length:
return content
else:
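
The hunk above shows only the guard branches of the two helpers, so the following is a minimal standalone sketch of the documented behaviour rather than Galaxy's actual code: dates older than one week are formatted absolutely, and truncation appends a suffix once the content exceeds the limit. The relative-wording and truncation bodies below are stand-ins (the real helpers delegate to webhelpers).

# Minimal sketch, assuming the behaviour described by the comments/docstrings above.
from datetime import datetime, timedelta

def time_ago(x):
    """Convert a datetime to a string."""
    delta = timedelta(weeks=1)
    # If the date is more than one week ago, display the actual date instead of in words.
    if (datetime.utcnow() - x) > delta:
        return x.strftime("%b %d, %Y")
    return "less than a week ago"  # stand-in for webhelpers' relative wording

def truncate(content, length=100, suffix='...'):
    """Smart string truncation."""
    if len(content) <= length:
        return content
    return content[:length] + suffix  # stand-in truncation strategy

print(time_ago(datetime.utcnow() - timedelta(days=10)))        # e.g. "Mar 07, 2013"
print(truncate("A very long dataset description", length=10))  # "A very lon..."
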
https://bitbucket.org/galaxy/galaxy-central/commits/92275794f6ef/
changeset: 92275794f6ef
user: jgoecks
date: 2013-03-17 16:27:36
summary: Add method to encode ids in a dictionary and use it to encode the dataset id in tabular display.
affected #: 2 files
diff -r 8070d6f638fea37aff3ac3e081b7bd9fba0402b7 -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 lib/galaxy/web/security/__init__.py
--- a/lib/galaxy/web/security/__init__.py
+++ b/lib/galaxy/web/security/__init__.py
@@ -30,10 +30,13 @@
random_pool.stir()
return str( number.getRandomNumber( nbits, random_pool.get_bytes ) )
+
class SecurityHelper( object ):
+
def __init__( self, **config ):
self.id_secret = config['id_secret']
self.id_cipher = Blowfish.new( self.id_secret )
+
def encode_id( self, obj_id ):
# Convert to string
s = str( obj_id )
@@ -41,17 +44,32 @@
s = ( "!" * ( 8 - len(s) % 8 ) ) + s
# Encrypt
return self.id_cipher.encrypt( s ).encode( 'hex' )
+
+ def encode_dict_ids( self, a_dict ):
+ """
+ Encode all ids in dictionary. Ids are identified by (a) an 'id' key or
+ (b) a key that ends with '_id'
+ """
+ for key, val in a_dict.items():
+ if key == 'id' or key.endswith('_id'):
+ a_dict[ key ] = self.encode_id( val )
+
+ return a_dict
+
def decode_id( self, obj_id ):
return int( self.id_cipher.decrypt( obj_id.decode( 'hex' ) ).lstrip( "!" ) )
+
def encode_guid( self, session_key ):
# Session keys are strings
# Pad to a multiple of 8 with leading "!"
s = ( "!" * ( 8 - len( session_key ) % 8 ) ) + session_key
# Encrypt
return self.id_cipher.encrypt( s ).encode( 'hex' )
+
def decode_guid( self, session_key ):
# Session keys are strings
return self.id_cipher.decrypt( session_key.decode( 'hex' ) ).lstrip( "!" )
+
def get_new_guid( self ):
# Generate a unique, high entropy 128 bit random number
return get_random_bytes( 16 )
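
Taken on its own, the new encode_dict_ids walks a dictionary and runs every id-like value through encode_id. Here is a minimal standalone sketch of that key-selection logic; the encode_id below is a stand-in (the real SecurityHelper pads to a multiple of 8 with "!", Blowfish-encrypts against id_secret and hex-encodes, as shown above).

# Sketch of the key-selection logic added by encode_dict_ids; encode_id is a stand-in.
def encode_id(obj_id):
    return "encoded-%s" % obj_id

def encode_dict_ids(a_dict):
    """
    Encode all ids in dictionary. Ids are identified by (a) an 'id' key or
    (b) a key that ends with '_id'.
    """
    for key, val in a_dict.items():
        if key == 'id' or key.endswith('_id'):
            a_dict[key] = encode_id(val)
    return a_dict

print(encode_dict_ids({'id': 42, 'history_id': 7, 'name': 'tabular'}))
# {'id': 'encoded-42', 'history_id': 'encoded-7', 'name': 'tabular'}

This is what lets the template change below serialize the dataset dictionary to JSON for the client without exposing raw database ids.
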
diff -r 8070d6f638fea37aff3ac3e081b7bd9fba0402b7 -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 templates/webapps/galaxy/dataset/tabular_chunked.mako
--- a/templates/webapps/galaxy/dataset/tabular_chunked.mako
+++ b/templates/webapps/galaxy/dataset/tabular_chunked.mako
@@ -17,8 +17,7 @@
require(['mvc/data'], function(data) {
data.createTabularDatasetChunkedView(
- // Dataset config. TODO: encode id.
- _.extend( ${h.to_json_string( dataset.get_api_value() )},
+ _.extend( ${h.to_json_string( trans.security.encode_dict_ids( dataset.get_api_value() ) )},
{
chunk_url: "${h.url_for( controller='/dataset', action='display',
dataset_id=trans.security.encode_id( dataset.id ))}",
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Import fix for admin grids in the tool shed.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1aa6224b32c2/
changeset: 1aa6224b32c2
user: greg
date: 2013-03-16 20:18:16
summary: Import fix for admin grids in the tool shed.
affected #: 1 file
diff -r 4c7d89fe3fc9e9e4e8cd69aa0ff1504b842d668a -r 1aa6224b32c2e1c05eafbf1f507d2f7a754ded89 lib/tool_shed/grids/admin_grids.py
--- a/lib/tool_shed/grids/admin_grids.py
+++ b/lib/tool_shed/grids/admin_grids.py
@@ -5,6 +5,12 @@
import tool_shed.util.shed_util_common as suc
from tool_shed.grids.repository_grids import RepositoryGrid, CategoryGrid
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
+
log = logging.getLogger( __name__ )
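
The fix is the standard Galaxy eggs pattern: pkg_resources.require() activates the bundled mercurial egg before the import statement runs. Below is a hedged standalone sketch of that pattern; outside a Galaxy checkout neither the galaxy.eggs module nor a mercurial egg may be available, so the guard only reports what is missing and is not part of the commit above.

# Sketch of the require-before-import pattern; the try/except guard is an
# addition for standalone use and is not part of the commit above.
import logging

log = logging.getLogger(__name__)

try:
    import pkg_resources
    pkg_resources.require("mercurial")
    from mercurial import hg, ui, commands  # noqa: F401
except Exception as exc:
    log.warning("mercurial is not importable here: %s", exc)
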
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for viewing tools in a repository.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4c7d89fe3fc9/
changeset: 4c7d89fe3fc9
user: greg
date: 2013-03-16 20:08:14
summary: Fix for viewing tools in a repository.
affected #: 1 file
diff -r a5c906f0e8a03b9ec884b224393c5cb767dcf37d -r 4c7d89fe3fc9e9e4e8cd69aa0ff1504b842d668a lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -535,7 +535,7 @@
def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
tool = None
message = ''
- ctx = get_changectx_for_changeset( repo, changeset_revision )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
# We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
# the manifest, but have been deleted from disk.
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
@@ -548,7 +548,7 @@
error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if error:
log.debug( message )
- manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
+ manifest_ctx, ctx_file = suc.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
if manifest_ctx and ctx_file:
tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
message = concat_messages( message, message2 )
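
These helpers live in tool_shed.util.shed_util_common (imported as suc), so calling them without the module prefix raises NameError at runtime; that is the bug class this commit fixes. A small standalone illustration with a stand-in module object:

# Standalone illustration of the failure mode fixed above; 'suc' here is a
# stand-in module object, not the real tool_shed.util.shed_util_common.
import types

suc = types.ModuleType("shed_util_common")

def _get_changectx_for_changeset(repo, changeset_revision):
    return "ctx-for-%s" % changeset_revision

suc.get_changectx_for_changeset = _get_changectx_for_changeset

try:
    ctx = get_changectx_for_changeset("repo", "4c7d89fe3fc9")  # pre-fix, unqualified
except NameError as exc:
    print("unqualified call fails:", exc)

print(suc.get_changectx_for_changeset("repo", "4c7d89fe3fc9"))  # post-fix, qualified
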
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Relocate the tool shed's workflow utility component.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a5c906f0e8a0/
changeset: a5c906f0e8a0
user: greg
date: 2013-03-16 17:43:20
summary: Relocate the tool shed's workflow utility component.
affected #: 4 files
diff -r 3f2e8f5f0d7de7131aa878e867c12c352f1f79b5 -r a5c906f0e8a03b9ec884b224393c5cb767dcf37d lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -7,9 +7,8 @@
from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
-from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util, workflow_util
from tool_shed.galaxy_install import repository_util
-from galaxy.webapps.tool_shed.util import workflow_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
import pkg_resources
diff -r 3f2e8f5f0d7de7131aa878e867c12c352f1f79b5 -r a5c906f0e8a03b9ec884b224393c5cb767dcf37d lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,9 +11,9 @@
from galaxy.util import json
from galaxy.model.orm import and_, or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, review_util, tool_dependency_util, tool_util
+from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, review_util, tool_dependency_util, tool_util, workflow_util
from tool_shed.galaxy_install import repository_util
-from galaxy.webapps.tool_shed.util import common_util, container_util, workflow_util
+from galaxy.webapps.tool_shed.util import common_util, container_util
import galaxy.tools
import tool_shed.grids.repository_grids as repository_grids
import tool_shed.grids.util as grids_util
diff -r 3f2e8f5f0d7de7131aa878e867c12c352f1f79b5 -r a5c906f0e8a03b9ec884b224393c5cb767dcf37d lib/galaxy/webapps/tool_shed/util/workflow_util.py
--- a/lib/galaxy/webapps/tool_shed/util/workflow_util.py
+++ /dev/null
@@ -1,411 +0,0 @@
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "SVGFig" )
-
-import logging, svgfig
-from galaxy.util import json
-import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, tool_util
-from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
-import galaxy.webapps.galaxy.controllers.workflow
-import galaxy.tools
-import galaxy.tools.parameters
-
-log = logging.getLogger( __name__ )
-
-class RepoInputDataModule( InputDataModule ):
-
- type = "data_input"
- name = "Input dataset"
-
- @classmethod
- def new( Class, trans, tools_metadata=None, tool_id=None ):
- module = Class( trans )
- module.state = dict( name="Input Dataset" )
- return module
- @classmethod
- def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata=None, secure=True ):
- module = Class( trans )
- state = json.from_json_string( step_dict[ "tool_state" ] )
- module.state = dict( name=state.get( "name", "Input Dataset" ) )
- return module
- @classmethod
- def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
- module = Class( trans )
- module.state = dict( name="Input Dataset" )
- if step.tool_inputs and "name" in step.tool_inputs:
- module.state[ 'name' ] = step.tool_inputs[ 'name' ]
- return module
-
-class RepoToolModule( ToolModule ):
-
- type = "tool"
-
- def __init__( self, trans, repository_id, changeset_revision, tools_metadata, tool_id ):
- self.trans = trans
- self.tools_metadata = tools_metadata
- self.tool_id = tool_id
- self.tool = None
- self.errors = None
- for tool_dict in tools_metadata:
- if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
- if trans.webapp.name == 'tool_shed':
- # We're in the tool shed.
- repository, self.tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
- if message and self.tool is None:
- self.errors = 'unavailable'
- break
- else:
- # We're in Galaxy.
- self.tool = trans.app.toolbox.tools_by_id.get( self.tool_id, None )
- if self.tool is None:
- self.errors = 'unavailable'
- self.post_job_actions = {}
- self.workflow_outputs = []
- self.state = None
- @classmethod
- def new( Class, trans, repository_id, changeset_revision, tools_metadata, tool_id=None ):
- module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
- module.state = module.tool.new_state( trans, all_pages=True )
- return module
- @classmethod
- def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
- tool_id = step_dict[ 'tool_id' ]
- module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
- module.state = galaxy.tools.DefaultToolState()
- if module.tool is not None:
- module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
- module.errors = step_dict.get( "tool_errors", None )
- return module
- @classmethod
- def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
- module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
- module.state = galaxy.tools.DefaultToolState()
- if module.tool:
- module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
- else:
- module.state.inputs = {}
- module.errors = step.tool_errors
- return module
- def get_data_inputs( self ):
- data_inputs = []
- def callback( input, value, prefixed_name, prefixed_label ):
- if isinstance( input, galaxy.tools.parameters.DataToolParameter ):
- data_inputs.append( dict( name=prefixed_name,
- label=prefixed_label,
- extensions=input.extensions ) )
- if self.tool:
- galaxy.tools.parameters.visit_input_values( self.tool.inputs, self.state.inputs, callback )
- return data_inputs
- def get_data_outputs( self ):
- data_outputs = []
- if self.tool:
- data_inputs = None
- for name, tool_output in self.tool.outputs.iteritems():
- if tool_output.format_source != None:
- # Default to special name "input" which remove restrictions on connections
- formats = [ 'input' ]
- if data_inputs == None:
- data_inputs = self.get_data_inputs()
- # Find the input parameter referenced by format_source
- for di in data_inputs:
- # Input names come prefixed with conditional and repeat names separated by '|',
- # so remove prefixes when comparing with format_source.
- if di[ 'name' ] != None and di[ 'name' ].split( '|' )[ -1 ] == tool_output.format_source:
- formats = di[ 'extensions' ]
- else:
- formats = [ tool_output.format ]
- for change_elem in tool_output.change_format:
- for when_elem in change_elem.findall( 'when' ):
- format = when_elem.get( 'format', None )
- if format and format not in formats:
- formats.append( format )
- data_outputs.append( dict( name=name, extensions=formats ) )
- return data_outputs
-
-class RepoWorkflowModuleFactory( WorkflowModuleFactory ):
- def __init__( self, module_types ):
- self.module_types = module_types
- def new( self, trans, type, tools_metadata=None, tool_id=None ):
- """Return module for type and (optional) tool_id initialized with new / default state."""
- assert type in self.module_types
- return self.module_types[type].new( trans, tool_id )
- def from_dict( self, trans, repository_id, changeset_revision, step_dict, **kwd ):
- """Return module initialized from the data in dictionary `step_dict`."""
- type = step_dict[ 'type' ]
- assert type in self.module_types
- return self.module_types[ type ].from_dict( trans, repository_id, changeset_revision, step_dict, **kwd )
- def from_workflow_step( self, trans, repository_id, changeset_revision, tools_metadata, step ):
- """Return module initialized from the WorkflowStep object `step`."""
- type = step.type
- return self.module_types[ type ].from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
-
-module_factory = RepoWorkflowModuleFactory( dict( data_input=RepoInputDataModule, tool=RepoToolModule ) )
-
-def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
- """
- Return an svg image representation of a workflow dictionary created when the workflow was exported. This method is called
- from both Galaxy and the tool shed. When called from the tool shed, repository_metadata_id will have a value and repository_id
- will be None. When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
- """
- workflow_name = encoding_util.tool_shed_decode( workflow_name )
- if trans.webapp.name == 'tool_shed':
- # We're in the tool shed.
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
- repository_id = trans.security.encode_id( repository_metadata.repository_id )
- changeset_revision = repository_metadata.changeset_revision
- metadata = repository_metadata.metadata
- else:
- # We're in Galaxy.
- repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
- changeset_revision = repository.changeset_revision
- metadata = repository.metadata
- # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- for workflow_tup in metadata[ 'workflows' ]:
- workflow_dict = workflow_tup[1]
- if workflow_dict[ 'name' ] == workflow_name:
- break
- if 'tools' in metadata:
- tools_metadata = metadata[ 'tools' ]
- else:
- tools_metadata = []
- workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
- workflow_dict=workflow_dict,
- tools_metadata=tools_metadata,
- repository_id=repository_id,
- changeset_revision=changeset_revision )
- data = []
- canvas = svgfig.canvas( style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left" )
- text = svgfig.SVG( "g" )
- connectors = svgfig.SVG( "g" )
- boxes = svgfig.SVG( "g" )
- svgfig.Text.defaults[ "font-size" ] = "10px"
- in_pos = {}
- out_pos = {}
- margin = 5
- # Spacing between input/outputs.
- line_px = 16
- # Store px width for boxes of each step.
- widths = {}
- max_width, max_x, max_y = 0, 0, 0
- for step in workflow.steps:
- step.upgrade_messages = {}
- module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
- tool_errors = module.type == 'tool' and not module.tool
- module_data_inputs = get_workflow_data_inputs( step, module )
- module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
- step_dict = {
- 'id' : step.order_index,
- 'data_inputs' : module_data_inputs,
- 'data_outputs' : module_data_outputs,
- 'position' : step.position,
- 'tool_errors' : tool_errors
- }
- input_conn_dict = {}
- for conn in step.input_connections:
- input_conn_dict[ conn.input_name ] = dict( id=conn.output_step.order_index, output_name=conn.output_name )
- step_dict[ 'input_connections' ] = input_conn_dict
- data.append( step_dict )
- x, y = step.position[ 'left' ], step.position[ 'top' ]
- count = 0
- module_name = get_workflow_module_name( module, missing_tool_tups )
- max_len = len( module_name ) * 1.5
- text.append( svgfig.Text( x, y + 20, module_name, **{ "font-size": "14px" } ).SVG() )
- y += 45
- for di in module_data_inputs:
- cur_y = y + count * line_px
- if step.order_index not in in_pos:
- in_pos[ step.order_index ] = {}
- in_pos[ step.order_index ][ di[ 'name' ] ] = ( x, cur_y )
- text.append( svgfig.Text( x, cur_y, di[ 'label' ] ).SVG() )
- count += 1
- max_len = max( max_len, len( di[ 'label' ] ) )
- if len( module.get_data_inputs() ) > 0:
- y += 15
- for do in module_data_outputs:
- cur_y = y + count * line_px
- if step.order_index not in out_pos:
- out_pos[ step.order_index ] = {}
- out_pos[ step.order_index ][ do[ 'name' ] ] = ( x, cur_y )
- text.append( svgfig.Text( x, cur_y, do[ 'name' ] ).SVG() )
- count += 1
- max_len = max( max_len, len( do['name' ] ) )
- widths[ step.order_index ] = max_len * 5.5
- max_x = max( max_x, step.position[ 'left' ] )
- max_y = max( max_y, step.position[ 'top' ] )
- max_width = max( max_width, widths[ step.order_index ] )
- for step_dict in data:
- tool_unavailable = step_dict[ 'tool_errors' ]
- width = widths[ step_dict[ 'id' ] ]
- x, y = step_dict[ 'position' ][ 'left' ], step_dict[ 'position' ][ 'top' ]
- # Only highlight missing tools if displaying in the tool shed.
- if trans.webapp.name == 'tool_shed' and tool_unavailable:
- fill = "#EBBCB2"
- else:
- fill = "#EBD9B2"
- boxes.append( svgfig.Rect( x - margin, y, x + width - margin, y + 30, fill=fill ).SVG() )
- box_height = ( len( step_dict[ 'data_inputs' ] ) + len( step_dict[ 'data_outputs' ] ) ) * line_px + margin
- # Draw separator line.
- if len( step_dict[ 'data_inputs' ] ) > 0:
- box_height += 15
- sep_y = y + len( step_dict[ 'data_inputs' ] ) * line_px + 40
- text.append( svgfig.Line( x - margin, sep_y, x + width - margin, sep_y ).SVG() )
- # Define an input/output box.
- boxes.append( svgfig.Rect( x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff" ).SVG() )
- for conn, output_dict in step_dict[ 'input_connections' ].iteritems():
- in_coords = in_pos[ step_dict[ 'id' ] ][ conn ]
- # out_pos_index will be a step number like 1, 2, 3...
- out_pos_index = output_dict[ 'id' ]
- # out_pos_name will be a string like 'o', 'o2', etc.
- out_pos_name = output_dict[ 'output_name' ]
- if out_pos_index in out_pos:
- # out_conn_index_dict will be something like:
- # 7: {'o': (824.5, 618)}
- out_conn_index_dict = out_pos[ out_pos_index ]
- if out_pos_name in out_conn_index_dict:
- out_conn_pos = out_pos[ out_pos_index ][ out_pos_name ]
- else:
- # Take any key / value pair available in out_conn_index_dict.
- # A problem will result if the dictionary is empty.
- if out_conn_index_dict.keys():
- key = out_conn_index_dict.keys()[0]
- out_conn_pos = out_pos[ out_pos_index ][ key ]
- adjusted = ( out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ], out_conn_pos[ 1 ] )
- text.append( svgfig.SVG( "circle",
- cx=out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ] - margin,
- cy=out_conn_pos[ 1 ] - margin,
- r = 5,
- fill="#ffffff" ) )
- connectors.append( svgfig.Line( adjusted[ 0 ],
- adjusted[ 1 ] - margin,
- in_coords[ 0 ] - 10,
- in_coords[ 1 ],
- arrow_end = "true" ).SVG() )
- canvas.append( connectors )
- canvas.append( boxes )
- canvas.append( text )
- width, height = ( max_x + max_width + 50 ), max_y + 300
- canvas[ 'width' ] = "%s px" % width
- canvas[ 'height' ] = "%s px" % height
- canvas[ 'viewBox' ] = "0 0 %s %s" % ( width, height )
- trans.response.set_content_type( "image/svg+xml" )
- return canvas.standalone_xml()
-def get_workflow_data_inputs( step, module ):
- if module.type == 'tool':
- if module.tool:
- return module.get_data_inputs()
- else:
- data_inputs = []
- for wfsc in step.input_connections:
- data_inputs_dict = {}
- data_inputs_dict[ 'extensions' ] = [ '' ]
- data_inputs_dict[ 'name' ] = wfsc.input_name
- data_inputs_dict[ 'label' ] = 'Unknown'
- data_inputs.append( data_inputs_dict )
- return data_inputs
- return module.get_data_inputs()
-def get_workflow_data_outputs( step, module, steps ):
- if module.type == 'tool':
- if module.tool:
- return module.get_data_outputs()
- else:
- data_outputs = []
- data_outputs_dict = {}
- data_outputs_dict[ 'extensions' ] = [ 'input' ]
- found = False
- for workflow_step in steps:
- for wfsc in workflow_step.input_connections:
- if step.name == wfsc.output_step.name:
- data_outputs_dict[ 'name' ] = wfsc.output_name
- found = True
- break
- if found:
- break
- if not found:
- # We're at the last step of the workflow.
- data_outputs_dict[ 'name' ] = 'output'
- data_outputs.append( data_outputs_dict )
- return data_outputs
- return module.get_data_outputs()
-def get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
- """
- Return an in-memory Workflow object from the dictionary object created when it was exported. This method is called from
- both Galaxy and the tool shed to retrieve a Workflow object that can be displayed as an SVG image. This method is also
- called from Galaxy to retrieve a Workflow object that can be used for saving to the Galaxy database.
- """
- trans.workflow_building_mode = True
- workflow = trans.model.Workflow()
- workflow.name = workflow_dict[ 'name' ]
- workflow.has_errors = False
- steps = []
- # Keep ids for each step that we need to use to make connections.
- steps_by_external_id = {}
- # Keep track of tools required by the workflow that are not available in
- # the tool shed repository. Each tuple in the list of missing_tool_tups
- # will be ( tool_id, tool_name, tool_version ).
- missing_tool_tups = []
- # First pass to build step objects and populate basic values
- for key, step_dict in workflow_dict[ 'steps' ].iteritems():
- # Create the model class for the step
- step = trans.model.WorkflowStep()
- step.name = step_dict[ 'name' ]
- step.position = step_dict[ 'position' ]
- module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata, secure=False )
- if module.type == 'tool' and module.tool is None:
- # A required tool is not available in the current repository.
- step.tool_errors = 'unavailable'
- missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
- if missing_tool_tup not in missing_tool_tups:
- missing_tool_tups.append( missing_tool_tup )
- module.save_to_step( step )
- if step.tool_errors:
- workflow.has_errors = True
- # Stick this in the step temporarily.
- step.temp_input_connections = step_dict[ 'input_connections' ]
- steps.append( step )
- steps_by_external_id[ step_dict[ 'id' ] ] = step
- # Second pass to deal with connections between steps.
- for step in steps:
- # Input connections.
- for input_name, conn_dict in step.temp_input_connections.iteritems():
- if conn_dict:
- output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
- conn = trans.model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
- conn.output_step = output_step
- conn.output_name = conn_dict[ 'output_name' ]
- step.input_connections.append( conn )
- del step.temp_input_connections
- # Order the steps if possible.
- galaxy.webapps.galaxy.controllers.workflow.attach_ordered_steps( workflow, steps )
- # Return the in-memory Workflow object for display or later persistence to the Galaxy database.
- return workflow, missing_tool_tups
-def get_workflow_module_name( module, missing_tool_tups ):
- module_name = module.get_name()
- if module.type == 'tool' and module_name == 'unavailable':
- for missing_tool_tup in missing_tool_tups:
- missing_tool_id, missing_tool_name, missing_tool_version = missing_tool_tup
- if missing_tool_id == module.tool_id:
- module_name = '%s' % missing_tool_name
- break
- return module_name
-def save_workflow( trans, workflow ):
- """Use the received in-memory Workflow object for saving to the Galaxy database."""
- stored = trans.model.StoredWorkflow()
- stored.name = workflow.name
- workflow.stored_workflow = stored
- stored.latest_workflow = workflow
- stored.user = trans.user
- trans.sa_session.add( stored )
- trans.sa_session.flush()
- # Add a new entry to the Workflows menu.
- if trans.user.stored_workflow_menu_entries is None:
- trans.user.stored_workflow_menu_entries = []
- menuEntry = trans.model.StoredWorkflowMenuEntry()
- menuEntry.stored_workflow = stored
- trans.user.stored_workflow_menu_entries.append( menuEntry )
- trans.sa_session.flush()
- return stored
diff -r 3f2e8f5f0d7de7131aa878e867c12c352f1f79b5 -r a5c906f0e8a03b9ec884b224393c5cb767dcf37d lib/tool_shed/util/workflow_util.py
--- /dev/null
+++ b/lib/tool_shed/util/workflow_util.py
@@ -0,0 +1,411 @@
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "SVGFig" )
+
+import logging, svgfig
+from galaxy.util import json
+import tool_shed.util.shed_util_common as suc
+from tool_shed.util import encoding_util, metadata_util, tool_util
+from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
+import galaxy.webapps.galaxy.controllers.workflow
+import galaxy.tools
+import galaxy.tools.parameters
+
+log = logging.getLogger( __name__ )
+
+class RepoInputDataModule( InputDataModule ):
+
+ type = "data_input"
+ name = "Input dataset"
+
+ @classmethod
+ def new( Class, trans, tools_metadata=None, tool_id=None ):
+ module = Class( trans )
+ module.state = dict( name="Input Dataset" )
+ return module
+ @classmethod
+ def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata=None, secure=True ):
+ module = Class( trans )
+ state = json.from_json_string( step_dict[ "tool_state" ] )
+ module.state = dict( name=state.get( "name", "Input Dataset" ) )
+ return module
+ @classmethod
+ def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
+ module = Class( trans )
+ module.state = dict( name="Input Dataset" )
+ if step.tool_inputs and "name" in step.tool_inputs:
+ module.state[ 'name' ] = step.tool_inputs[ 'name' ]
+ return module
+
+class RepoToolModule( ToolModule ):
+
+ type = "tool"
+
+ def __init__( self, trans, repository_id, changeset_revision, tools_metadata, tool_id ):
+ self.trans = trans
+ self.tools_metadata = tools_metadata
+ self.tool_id = tool_id
+ self.tool = None
+ self.errors = None
+ for tool_dict in tools_metadata:
+ if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
+ if trans.webapp.name == 'tool_shed':
+ # We're in the tool shed.
+ repository, self.tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+ if message and self.tool is None:
+ self.errors = 'unavailable'
+ break
+ else:
+ # We're in Galaxy.
+ self.tool = trans.app.toolbox.tools_by_id.get( self.tool_id, None )
+ if self.tool is None:
+ self.errors = 'unavailable'
+ self.post_job_actions = {}
+ self.workflow_outputs = []
+ self.state = None
+ @classmethod
+ def new( Class, trans, repository_id, changeset_revision, tools_metadata, tool_id=None ):
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
+ module.state = module.tool.new_state( trans, all_pages=True )
+ return module
+ @classmethod
+ def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
+ tool_id = step_dict[ 'tool_id' ]
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
+ module.state = galaxy.tools.DefaultToolState()
+ if module.tool is not None:
+ module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
+ module.errors = step_dict.get( "tool_errors", None )
+ return module
+ @classmethod
+ def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
+ module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
+ module.state = galaxy.tools.DefaultToolState()
+ if module.tool:
+ module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
+ else:
+ module.state.inputs = {}
+ module.errors = step.tool_errors
+ return module
+ def get_data_inputs( self ):
+ data_inputs = []
+ def callback( input, value, prefixed_name, prefixed_label ):
+ if isinstance( input, galaxy.tools.parameters.DataToolParameter ):
+ data_inputs.append( dict( name=prefixed_name,
+ label=prefixed_label,
+ extensions=input.extensions ) )
+ if self.tool:
+ galaxy.tools.parameters.visit_input_values( self.tool.inputs, self.state.inputs, callback )
+ return data_inputs
+ def get_data_outputs( self ):
+ data_outputs = []
+ if self.tool:
+ data_inputs = None
+ for name, tool_output in self.tool.outputs.iteritems():
+ if tool_output.format_source != None:
+ # Default to special name "input" which remove restrictions on connections
+ formats = [ 'input' ]
+ if data_inputs == None:
+ data_inputs = self.get_data_inputs()
+ # Find the input parameter referenced by format_source
+ for di in data_inputs:
+ # Input names come prefixed with conditional and repeat names separated by '|',
+ # so remove prefixes when comparing with format_source.
+ if di[ 'name' ] != None and di[ 'name' ].split( '|' )[ -1 ] == tool_output.format_source:
+ formats = di[ 'extensions' ]
+ else:
+ formats = [ tool_output.format ]
+ for change_elem in tool_output.change_format:
+ for when_elem in change_elem.findall( 'when' ):
+ format = when_elem.get( 'format', None )
+ if format and format not in formats:
+ formats.append( format )
+ data_outputs.append( dict( name=name, extensions=formats ) )
+ return data_outputs
+
+class RepoWorkflowModuleFactory( WorkflowModuleFactory ):
+ def __init__( self, module_types ):
+ self.module_types = module_types
+ def new( self, trans, type, tools_metadata=None, tool_id=None ):
+ """Return module for type and (optional) tool_id initialized with new / default state."""
+ assert type in self.module_types
+ return self.module_types[type].new( trans, tool_id )
+ def from_dict( self, trans, repository_id, changeset_revision, step_dict, **kwd ):
+ """Return module initialized from the data in dictionary `step_dict`."""
+ type = step_dict[ 'type' ]
+ assert type in self.module_types
+ return self.module_types[ type ].from_dict( trans, repository_id, changeset_revision, step_dict, **kwd )
+ def from_workflow_step( self, trans, repository_id, changeset_revision, tools_metadata, step ):
+ """Return module initialized from the WorkflowStep object `step`."""
+ type = step.type
+ return self.module_types[ type ].from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
+
+module_factory = RepoWorkflowModuleFactory( dict( data_input=RepoInputDataModule, tool=RepoToolModule ) )
+
+def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
+ """
+ Return an svg image representation of a workflow dictionary created when the workflow was exported. This method is called
+ from both Galaxy and the tool shed. When called from the tool shed, repository_metadata_id will have a value and repository_id
+ will be None. When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
+ """
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
+ if trans.webapp.name == 'tool_shed':
+ # We're in the tool shed.
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_id = trans.security.encode_id( repository_metadata.repository_id )
+ changeset_revision = repository_metadata.changeset_revision
+ metadata = repository_metadata.metadata
+ else:
+ # We're in Galaxy.
+ repository = suc.get_tool_shed_repository_by_id( trans, repository_id )
+ changeset_revision = repository.changeset_revision
+ metadata = repository.metadata
+ # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+ # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ for workflow_tup in metadata[ 'workflows' ]:
+ workflow_dict = workflow_tup[1]
+ if workflow_dict[ 'name' ] == workflow_name:
+ break
+ if 'tools' in metadata:
+ tools_metadata = metadata[ 'tools' ]
+ else:
+ tools_metadata = []
+ workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
+ workflow_dict=workflow_dict,
+ tools_metadata=tools_metadata,
+ repository_id=repository_id,
+ changeset_revision=changeset_revision )
+ data = []
+ canvas = svgfig.canvas( style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left" )
+ text = svgfig.SVG( "g" )
+ connectors = svgfig.SVG( "g" )
+ boxes = svgfig.SVG( "g" )
+ svgfig.Text.defaults[ "font-size" ] = "10px"
+ in_pos = {}
+ out_pos = {}
+ margin = 5
+ # Spacing between input/outputs.
+ line_px = 16
+ # Store px width for boxes of each step.
+ widths = {}
+ max_width, max_x, max_y = 0, 0, 0
+ for step in workflow.steps:
+ step.upgrade_messages = {}
+ module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
+ tool_errors = module.type == 'tool' and not module.tool
+ module_data_inputs = get_workflow_data_inputs( step, module )
+ module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
+ step_dict = {
+ 'id' : step.order_index,
+ 'data_inputs' : module_data_inputs,
+ 'data_outputs' : module_data_outputs,
+ 'position' : step.position,
+ 'tool_errors' : tool_errors
+ }
+ input_conn_dict = {}
+ for conn in step.input_connections:
+ input_conn_dict[ conn.input_name ] = dict( id=conn.output_step.order_index, output_name=conn.output_name )
+ step_dict[ 'input_connections' ] = input_conn_dict
+ data.append( step_dict )
+ x, y = step.position[ 'left' ], step.position[ 'top' ]
+ count = 0
+ module_name = get_workflow_module_name( module, missing_tool_tups )
+ max_len = len( module_name ) * 1.5
+ text.append( svgfig.Text( x, y + 20, module_name, **{ "font-size": "14px" } ).SVG() )
+ y += 45
+ for di in module_data_inputs:
+ cur_y = y + count * line_px
+ if step.order_index not in in_pos:
+ in_pos[ step.order_index ] = {}
+ in_pos[ step.order_index ][ di[ 'name' ] ] = ( x, cur_y )
+ text.append( svgfig.Text( x, cur_y, di[ 'label' ] ).SVG() )
+ count += 1
+ max_len = max( max_len, len( di[ 'label' ] ) )
+ if len( module.get_data_inputs() ) > 0:
+ y += 15
+ for do in module_data_outputs:
+ cur_y = y + count * line_px
+ if step.order_index not in out_pos:
+ out_pos[ step.order_index ] = {}
+ out_pos[ step.order_index ][ do[ 'name' ] ] = ( x, cur_y )
+ text.append( svgfig.Text( x, cur_y, do[ 'name' ] ).SVG() )
+ count += 1
+ max_len = max( max_len, len( do['name' ] ) )
+ widths[ step.order_index ] = max_len * 5.5
+ max_x = max( max_x, step.position[ 'left' ] )
+ max_y = max( max_y, step.position[ 'top' ] )
+ max_width = max( max_width, widths[ step.order_index ] )
+ for step_dict in data:
+ tool_unavailable = step_dict[ 'tool_errors' ]
+ width = widths[ step_dict[ 'id' ] ]
+ x, y = step_dict[ 'position' ][ 'left' ], step_dict[ 'position' ][ 'top' ]
+ # Only highlight missing tools if displaying in the tool shed.
+ if trans.webapp.name == 'tool_shed' and tool_unavailable:
+ fill = "#EBBCB2"
+ else:
+ fill = "#EBD9B2"
+ boxes.append( svgfig.Rect( x - margin, y, x + width - margin, y + 30, fill=fill ).SVG() )
+ box_height = ( len( step_dict[ 'data_inputs' ] ) + len( step_dict[ 'data_outputs' ] ) ) * line_px + margin
+ # Draw separator line.
+ if len( step_dict[ 'data_inputs' ] ) > 0:
+ box_height += 15
+ sep_y = y + len( step_dict[ 'data_inputs' ] ) * line_px + 40
+ text.append( svgfig.Line( x - margin, sep_y, x + width - margin, sep_y ).SVG() )
+ # Define an input/output box.
+ boxes.append( svgfig.Rect( x - margin, y + 30, x + width - margin, y + 30 + box_height, fill="#ffffff" ).SVG() )
+ for conn, output_dict in step_dict[ 'input_connections' ].iteritems():
+ in_coords = in_pos[ step_dict[ 'id' ] ][ conn ]
+ # out_pos_index will be a step number like 1, 2, 3...
+ out_pos_index = output_dict[ 'id' ]
+ # out_pos_name will be a string like 'o', 'o2', etc.
+ out_pos_name = output_dict[ 'output_name' ]
+ if out_pos_index in out_pos:
+ # out_conn_index_dict will be something like:
+ # 7: {'o': (824.5, 618)}
+ out_conn_index_dict = out_pos[ out_pos_index ]
+ if out_pos_name in out_conn_index_dict:
+ out_conn_pos = out_pos[ out_pos_index ][ out_pos_name ]
+ else:
+ # Take any key / value pair available in out_conn_index_dict.
+ # A problem will result if the dictionary is empty.
+ if out_conn_index_dict.keys():
+ key = out_conn_index_dict.keys()[0]
+ out_conn_pos = out_pos[ out_pos_index ][ key ]
+ adjusted = ( out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ], out_conn_pos[ 1 ] )
+ text.append( svgfig.SVG( "circle",
+ cx=out_conn_pos[ 0 ] + widths[ output_dict[ 'id' ] ] - margin,
+ cy=out_conn_pos[ 1 ] - margin,
+ r = 5,
+ fill="#ffffff" ) )
+ connectors.append( svgfig.Line( adjusted[ 0 ],
+ adjusted[ 1 ] - margin,
+ in_coords[ 0 ] - 10,
+ in_coords[ 1 ],
+ arrow_end = "true" ).SVG() )
+ canvas.append( connectors )
+ canvas.append( boxes )
+ canvas.append( text )
+ width, height = ( max_x + max_width + 50 ), max_y + 300
+ canvas[ 'width' ] = "%s px" % width
+ canvas[ 'height' ] = "%s px" % height
+ canvas[ 'viewBox' ] = "0 0 %s %s" % ( width, height )
+ trans.response.set_content_type( "image/svg+xml" )
+ return canvas.standalone_xml()
+def get_workflow_data_inputs( step, module ):
+ if module.type == 'tool':
+ if module.tool:
+ return module.get_data_inputs()
+ else:
+ data_inputs = []
+ for wfsc in step.input_connections:
+ data_inputs_dict = {}
+ data_inputs_dict[ 'extensions' ] = [ '' ]
+ data_inputs_dict[ 'name' ] = wfsc.input_name
+ data_inputs_dict[ 'label' ] = 'Unknown'
+ data_inputs.append( data_inputs_dict )
+ return data_inputs
+ return module.get_data_inputs()
+def get_workflow_data_outputs( step, module, steps ):
+ if module.type == 'tool':
+ if module.tool:
+ return module.get_data_outputs()
+ else:
+ data_outputs = []
+ data_outputs_dict = {}
+ data_outputs_dict[ 'extensions' ] = [ 'input' ]
+ found = False
+ for workflow_step in steps:
+ for wfsc in workflow_step.input_connections:
+ if step.name == wfsc.output_step.name:
+ data_outputs_dict[ 'name' ] = wfsc.output_name
+ found = True
+ break
+ if found:
+ break
+ if not found:
+ # We're at the last step of the workflow.
+ data_outputs_dict[ 'name' ] = 'output'
+ data_outputs.append( data_outputs_dict )
+ return data_outputs
+ return module.get_data_outputs()
+def get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
+ """
+ Return an in-memory Workflow object from the dictionary object created when it was exported. This method is called from
+ both Galaxy and the tool shed to retrieve a Workflow object that can be displayed as an SVG image. This method is also
+ called from Galaxy to retrieve a Workflow object that can be used for saving to the Galaxy database.
+ """
+ trans.workflow_building_mode = True
+ workflow = trans.model.Workflow()
+ workflow.name = workflow_dict[ 'name' ]
+ workflow.has_errors = False
+ steps = []
+ # Keep ids for each step that we need to use to make connections.
+ steps_by_external_id = {}
+ # Keep track of tools required by the workflow that are not available in
+ # the tool shed repository. Each tuple in the list of missing_tool_tups
+ # will be ( tool_id, tool_name, tool_version ).
+ missing_tool_tups = []
+ # First pass to build step objects and populate basic values
+ for key, step_dict in workflow_dict[ 'steps' ].iteritems():
+ # Create the model class for the step
+ step = trans.model.WorkflowStep()
+ step.name = step_dict[ 'name' ]
+ step.position = step_dict[ 'position' ]
+ module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata, secure=False )
+ if module.type == 'tool' and module.tool is None:
+ # A required tool is not available in the current repository.
+ step.tool_errors = 'unavailable'
+ missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
+ if missing_tool_tup not in missing_tool_tups:
+ missing_tool_tups.append( missing_tool_tup )
+ module.save_to_step( step )
+ if step.tool_errors:
+ workflow.has_errors = True
+ # Stick this in the step temporarily.
+ step.temp_input_connections = step_dict[ 'input_connections' ]
+ steps.append( step )
+ steps_by_external_id[ step_dict[ 'id' ] ] = step
+ # Second pass to deal with connections between steps.
+ for step in steps:
+ # Input connections.
+ for input_name, conn_dict in step.temp_input_connections.iteritems():
+ if conn_dict:
+ output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
+ conn = trans.model.WorkflowStepConnection()
+ conn.input_step = step
+ conn.input_name = input_name
+ conn.output_step = output_step
+ conn.output_name = conn_dict[ 'output_name' ]
+ step.input_connections.append( conn )
+ del step.temp_input_connections
+ # Order the steps if possible.
+ galaxy.webapps.galaxy.controllers.workflow.attach_ordered_steps( workflow, steps )
+ # Return the in-memory Workflow object for display or later persistence to the Galaxy database.
+ return workflow, missing_tool_tups
+def get_workflow_module_name( module, missing_tool_tups ):
+ module_name = module.get_name()
+ if module.type == 'tool' and module_name == 'unavailable':
+ for missing_tool_tup in missing_tool_tups:
+ missing_tool_id, missing_tool_name, missing_tool_version = missing_tool_tup
+ if missing_tool_id == module.tool_id:
+ module_name = '%s' % missing_tool_name
+ break
+ return module_name
+def save_workflow( trans, workflow ):
+ """Use the received in-memory Workflow object for saving to the Galaxy database."""
+ stored = trans.model.StoredWorkflow()
+ stored.name = workflow.name
+ workflow.stored_workflow = stored
+ stored.latest_workflow = workflow
+ stored.user = trans.user
+ trans.sa_session.add( stored )
+ trans.sa_session.flush()
+ # Add a new entry to the Workflows menu.
+ if trans.user.stored_workflow_menu_entries is None:
+ trans.user.stored_workflow_menu_entries = []
+ menuEntry = trans.model.StoredWorkflowMenuEntry()
+ menuEntry.stored_workflow = stored
+ trans.user.stored_workflow_menu_entries.append( menuEntry )
+ trans.sa_session.flush()
+ return stored
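
Beyond the relocation itself, the heart of get_workflow_from_dict above is a two-pass construction: the first pass builds step objects keyed by their exported ids, and the second pass resolves input connections against that map. A simplified standalone sketch, with plain dictionaries standing in for trans.model.WorkflowStep and WorkflowStepConnection:

# Simplified sketch of the two-pass pattern; plain dicts stand in for the
# Galaxy model classes used in get_workflow_from_dict above.
workflow_dict = {
    'name': 'example',
    'steps': {
        '0': {'id': 0, 'name': 'Input Dataset', 'input_connections': {}},
        '1': {'id': 1, 'name': 'Filter',
              'input_connections': {'input': {'id': 0, 'output_name': 'output'}}},
    },
}

steps = []
steps_by_external_id = {}

# First pass: build step objects and remember them by exported id.
for key, step_dict in workflow_dict['steps'].items():
    step = {'name': step_dict['name'], 'input_connections': [],
            'temp_input_connections': step_dict['input_connections']}
    steps.append(step)
    steps_by_external_id[step_dict['id']] = step

# Second pass: resolve connections now that every step exists.
for step in steps:
    for input_name, conn_dict in step['temp_input_connections'].items():
        if conn_dict:
            output_step = steps_by_external_id[conn_dict['id']]
            step['input_connections'].append(
                {'input_name': input_name,
                 'output_step': output_step['name'],
                 'output_name': conn_dict['output_name']})
    del step['temp_input_connections']

print([s['input_connections'] for s in steps])
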
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Add blank lines to shed_util_common.py per new Galaxy dev team standards.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3f2e8f5f0d7d/
changeset: 3f2e8f5f0d7d
user: greg
date: 2013-03-16 16:34:03
summary: Add blank lines to shed_util_common.py per new Galaxy dev team standards.
affected #: 1 file
diff -r 74da2fbf09633268e1e6b6043bfd7e0515bd8422 -r 3f2e8f5f0d7de7131aa878e867c12c352f1f79b5 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -107,12 +107,14 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
+
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
return repository_metadata.malicious
return False
+
def changeset_is_valid( app, repository, changeset_revision ):
repo = hg.repository( get_configured_ui(), repository.repo_path( app ) )
for changeset in repo.changelog:
@@ -120,6 +122,7 @@
if changeset_revision == changeset_hash:
return True
return False
+
def clean_repository_clone_url( repository_clone_url ):
if repository_clone_url.find( '@' ) > 0:
# We have an url that includes an authenticated user, something like:
@@ -134,11 +137,13 @@
else:
tmp_url = repository_clone_url
return tmp_url
+
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
+
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
try:
@@ -153,6 +158,7 @@
error_message = 'Error cloning repository: %s' % str( e )
log.debug( error_message )
return False, error_message
+
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -164,6 +170,7 @@
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
os.chmod( config_filename, 0644 )
+
def copy_file_from_manifest( repo, ctx, filename, dir ):
"""Copy the latest version of the file named filename from the repository manifest to the directory to which dir refers."""
for changeset in reversed_upper_bounded_changelog( repo, ctx ):
@@ -176,6 +183,7 @@
fh.close()
return file_path
return None
+
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -231,10 +239,12 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
+
def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -244,12 +254,14 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+
def generate_clone_url_from_repo_info_tup( repo_info_tup ):
"""Generate teh URL for cloning a repositoyr given a tuple of toolshed, name, owner, changeset_revision."""
# Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab']
toolshed, name, owner, changeset_revision = repo_info_tup
# Don't include the changeset_revision in clone urls.
return url_join( toolshed, 'repos', owner, name )
+
def generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -258,6 +270,7 @@
if changeset_revision:
sharable_url += '/%s' % changeset_revision
return sharable_url
+
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -278,6 +291,7 @@
version_elem = SubElement( tool_elem, 'version' )
version_elem.text = tool.version
return tool_elem
+
def generate_tool_guid( repository_clone_url, tool ):
"""
Generate a guid for the installed tool. It is critical that this guid matches the guid for
@@ -286,6 +300,7 @@
"""
tmp_url = clean_repository_clone_url( repository_clone_url )
return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
+
def generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository ):
"""
Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed_tool_conf in
@@ -333,6 +348,7 @@
else:
tool_panel_dict[ guid ] = [ tool_section_dict ]
return tool_panel_dict
+
def generate_tool_shed_repository_install_dir( repository_clone_url, changeset_revision ):
"""
Generate a repository installation directory that guarantees repositories with the same name will always be installed in different directories.
@@ -345,6 +361,7 @@
repo_path = items[ 1 ]
tool_shed_url = clean_tool_shed_url( tool_shed_url )
return url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
+
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
"""Return the absolute path to a specified disk file contained in a repository."""
stripped_file_name = strip_path( file_name )
@@ -355,21 +372,25 @@
if name == stripped_file_name:
return os.path.abspath( os.path.join( root, name ) )
return file_path
+
def get_categories( trans ):
"""Get all categories from the database."""
return trans.sa_session.query( trans.model.Category ) \
.filter( trans.model.Category.table.c.deleted==False ) \
.order_by( trans.model.Category.table.c.name ) \
.all()
+
def get_category( trans, id ):
"""Get a category from the database."""
return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
+
def get_category_by_name( trans, name ):
"""Get a category from the database via name."""
try:
return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one()
except sqlalchemy.orm.exc.NoResultFound:
return None
+
def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
"""Retrieve a specified changectx from a repository."""
for changeset in repo.changelog:
@@ -377,6 +398,7 @@
if str( ctx ) == changeset_revision:
return ctx
return None
+
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
@@ -387,6 +409,7 @@
if ctx_file_name == config_file:
return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
+
def get_config_from_disk( config_file, relative_install_dir ):
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
@@ -394,6 +417,7 @@
if name == config_file:
return os.path.abspath( os.path.join( root, name ) )
return None
+
def get_configured_ui():
"""Configure any desired ui settings."""
_ui = ui.ui()
@@ -403,12 +427,14 @@
# quiet = True
_ui.setconfig( 'ui', 'quiet', True )
return _ui
+
def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
response = urllib2.urlopen( url )
ctx_rev = response.read()
response.close()
return ctx_rev
+
def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ):
"""Get the ctx file path for the latest revision of filename from the repository manifest up to the value of changeset_revision."""
stripped_filename = strip_path( filename )
@@ -420,6 +446,7 @@
if ctx_file_name == stripped_filename:
return manifest_ctx, ctx_file
return None, None
+
def get_file_context_from_ctx( ctx, filename ):
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
# within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
@@ -440,9 +467,11 @@
if deleted:
return 'DELETED'
return None
+
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
filename = strip_path( filename )
for ctx_file in ctx.files():
@@ -464,6 +493,7 @@
fh.close()
return tmp_filename
return None
+
def get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision ):
"""
Return the installable changeset_revision in the repository changelog after the changeset to which after_changeset_revision refers. If there
@@ -484,6 +514,7 @@
# We've found the changeset in the changelog for which we need to get the next downloadable changset.
found_after_changeset_revision = True
return None
+
def get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ):
repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
if not repository:
@@ -502,6 +533,7 @@
owner=owner,
dist_to_shed=False )
return repository
+
def get_ordered_downloadable_changeset_revisions( repository, repo ):
"""Return an ordered list of changeset_revisions defined by a repository changelog."""
changeset_tups = []
@@ -516,6 +548,7 @@
sorted_changeset_tups = sorted( changeset_tups )
sorted_changeset_revisions = [ changeset_tup[ 1 ] for changeset_tup in sorted_changeset_tups ]
return sorted_changeset_revisions
+
def get_previous_downloadable_changeset_revision( repository, repo, before_changeset_revision ):
"""
Return the installable changeset_revision in the repository changelog prior to the changeset to which before_changeset_revision
@@ -537,6 +570,7 @@
return INITIAL_CHANGELOG_HASH
else:
previous_changeset_revision = changeset_revision
+
def get_repo_info_tuple_contents( repo_info_tuple ):
# Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
if len( repo_info_tuple ) == 6:
@@ -545,12 +579,14 @@
elif len( repo_info_tuple ) == 7:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
+
def get_repository_by_id( trans, id ):
"""Get a repository from the database via id."""
if trans.webapp.name == 'galaxy':
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
else:
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+
def get_repository_by_name( app, name ):
"""Get a repository from the database via name."""
sa_session = app.model.context.current
@@ -558,6 +594,7 @@
return sa_session.query( app.model.ToolShedRepository ).filter_by( name=name ).first()
else:
return sa_session.query( app.model.Repository ).filter_by( name=name ).first()
+
def get_repository_by_name_and_owner( app, name, owner ):
"""Get a repository from the database via name and owner"""
sa_session = app.model.context.current
@@ -574,6 +611,7 @@
app.model.Repository.table.c.user_id == user.id ) ) \
.first()
return None
+
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
tool_shed=tool_shed,
@@ -587,6 +625,7 @@
owner=owner,
changeset_revision=changeset_revision )
return repository
+
def get_repository_file_contents( file_path ):
if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
@@ -605,6 +644,7 @@
safe_str = '%s%s' % ( safe_str, to_safe_string( large_str ) )
break
return safe_str
+
def get_repository_files( trans, folder_path ):
contents = []
for item in os.listdir( folder_path ):
@@ -618,9 +658,11 @@
if contents:
contents.sort()
return contents
+
def get_repository_in_tool_shed( trans, id ):
"""Get a repository on the tool shed side from the database via id."""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
"""Get metadata for a specified repository change set from the database."""
# Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
@@ -640,16 +682,19 @@
elif all_metadata_records:
return all_metadata_records[ 0 ]
return None
+
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( '/repos/' )
repo_path = items[ 1 ]
if repo_path.startswith( '/' ):
repo_path = repo_path.replace( '/', '', 1 )
return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
+
def get_repository_owner_from_clone_url( repository_clone_url ):
tmp_url = clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( '/repos/' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
+
def get_repository_tools_tups( app, metadata_dict ):
repository_tools_tups = []
index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
@@ -666,11 +711,13 @@
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
+
def get_reversed_changelog_changesets( repo ):
reversed_changelog = []
for changeset in repo.changelog:
reversed_changelog.insert( 0, changeset )
return reversed_changelog
+
def get_revision_label( trans, repository, changeset_revision ):
"""Return a string consisting of the human read-able changeset rev and the changeset revision string."""
repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
@@ -679,6 +726,7 @@
return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
else:
return "-1:%s" % changeset_revision
+
def get_rev_label_from_changeset_revision( repo, changeset_revision ):
ctx = get_changectx_for_changeset( repo, changeset_revision )
if ctx:
@@ -688,6 +736,7 @@
rev = '-1'
label = "-1:%s" % changeset_revision
return rev, label
+
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry in the shed_tool_conf_dict associated with the file."""
for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
@@ -697,6 +746,7 @@
file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] )
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
+
def get_tool_panel_config_tool_path_install_dir( app, repository ):
# Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
# repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
@@ -715,6 +765,7 @@
tool_path = shed_config_dict[ 'tool_path' ]
relative_install_dir = partial_install_dir
return shed_tool_conf, tool_path, relative_install_dir
+
def get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf ):
"""
Return the tool_path config setting for the received shed_tool_conf file by searching the tool box's in-memory list of shed_tool_confs for the
@@ -729,10 +780,12 @@
if file_name == shed_tool_conf:
return shed_tool_conf_dict[ 'tool_path' ]
return None
+
def get_tool_shed_repository_by_id( trans, repository_id ):
return trans.sa_session.query( trans.model.ToolShedRepository ) \
.filter( trans.model.ToolShedRepository.table.c.id == trans.security.decode_id( repository_id ) ) \
.first()
+
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
@@ -745,6 +798,7 @@
app.model.ToolShedRepository.table.c.owner == owner,
app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
+
def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
@@ -757,9 +811,11 @@
app.model.ToolShedRepository.table.c.owner == owner,
app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
.first()
+
def get_tool_shed_from_clone_url( repository_clone_url ):
tmp_url = clean_repository_clone_url( repository_clone_url )
return tmp_url.split( '/repos/' )[ 0 ].rstrip( '/' )
+
def get_url_from_repository_tool_shed( app, repository ):
"""
The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
@@ -772,6 +828,7 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
+
def get_url_from_tool_shed( app, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
@@ -782,9 +839,11 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
+
def get_user( trans, id ):
"""Get a user from the database by id."""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
+
def get_user_by_username( app, username ):
"""Get a user from the database by username."""
sa_session = app.model.context.current
@@ -795,6 +854,7 @@
return user
except Exception, e:
return None
+
def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
# There are 2 complementary features that enable a tool shed user to receive email notification:
# 1. Within User Preferences, they can elect to receive email when the first (or first valid)
@@ -886,6 +946,7 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
+
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
@@ -907,12 +968,14 @@
"key": full_path }
folder_contents.append( node )
return folder_contents
+
def remove_dir( dir ):
if os.path.exists( dir ):
try:
shutil.rmtree( dir )
except:
pass
+
def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple ):
"""
Handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
@@ -940,6 +1003,7 @@
if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
return tool_shed_repository, previous_changeset_revision
return None, None
+
def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
"""
Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
@@ -963,8 +1027,10 @@
if changeset_hash == included_upper_bounds_changeset_revision:
break
return reversed_changelog
+
def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+
def strip_path( fpath ):
if not fpath:
return fpath
@@ -973,6 +1039,7 @@
except:
file_name = fpath
return file_name
+
def to_safe_string( text, to_html=True ):
"""Translates the characters in text to an html string"""
if text:
@@ -1003,10 +1070,13 @@
translated.append( '' )
return ''.join( translated )
return text
+
def tool_shed_from_repository_clone_url( repository_clone_url ):
return clean_repository_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
+
def tool_shed_is_this_tool_shed( toolshed_base_url ):
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
+
def translate_string( raw_text, to_html=True ):
if raw_text:
if len( raw_text ) <= MAX_CONTENT_SIZE:
@@ -1017,6 +1087,7 @@
else:
translated_string = ''
return translated_string
+
def update_in_shed_tool_config( app, repository ):
"""
A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
@@ -1050,6 +1121,7 @@
elem = guid_to_tool_elem_dict[ guid ]
config_elems.append( elem )
config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
@@ -1067,11 +1139,13 @@
# It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
# purging is not supported by the mercurial API.
commands.update( get_configured_ui(), repo, rev=ctx_rev )
+
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
sa_session.add( tool_shed_repository )
sa_session.flush()
+
def url_join( *args ):
parts = []
for arg in args:
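The body of url_join is truncated at this point in the diff; as a rough, self-contained sketch (not Galaxy's actual code), the two path helpers behave roughly as follows, assuming url_join simply trims stray slashes and joins its fragments with '/', which is how callers such as get_ctx_rev and get_absolute_paths use it:

import os

def strip_path(fpath):
    # Keep only the file name portion of a path; tolerate empty input.
    if not fpath:
        return fpath
    return os.path.split(fpath)[1]

def url_join(*args):
    # Assumed behavior: trim stray slashes and join the fragments with '/'.
    return '/'.join(str(arg).strip('/') for arg in args)

# Hypothetical values, for illustration only.
print(url_join('http://toolshed.g2.bx.psu.edu', 'repos', 'owner/repo', '0123456789ab'))
# http://toolshed.g2.bx.psu.edu/repos/owner/repo/0123456789ab
print(strip_path('tools/filters/my_tool.xml'))
# my_tool.xml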
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
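A stripped-down sketch of the clone-URL parsing performed by get_tool_shed_from_clone_url and get_repository_owner in the diff above; it skips the authenticated-URL cleanup done by clean_repository_clone_url, and the example URL, owner, and repository names are hypothetical:

def get_tool_shed_from_clone_url(clone_url):
    # Everything before '/repos/' identifies the tool shed host.
    return clone_url.split('/repos/')[0].rstrip('/')

def get_repository_owner(clone_url):
    # The first path segment after '/repos/' is the repository owner.
    repo_path = clone_url.split('/repos/')[1]
    return repo_path.lstrip('/').split('/')[0]

# Hypothetical owner and repository names, for illustration only.
url = 'http://toolshed.g2.bx.psu.edu/repos/some_owner/some_repo'
print(get_tool_shed_from_clone_url(url))  # http://toolshed.g2.bx.psu.edu
print(get_repository_owner(url))          # some_owner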
commit/galaxy-central: greg: Extract repository review methods from shed_util_common and put them into the new tool shed review_util component.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/74da2fbf0963/
changeset: 74da2fbf0963
user: greg
date: 2013-03-16 16:25:43
summary: Extract repository review methods from shed_util_common and put them into the new tool shed review_util component.
affected #: 4 files
diff -r e40606362e962b4c452960c7c5f9b33b5b13f96c -r 74da2fbf09633268e1e6b6043bfd7e0515bd8422 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,7 +11,7 @@
from galaxy.util import json
from galaxy.model.orm import and_, or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, review_util, tool_dependency_util, tool_util
from tool_shed.galaxy_install import repository_util
from galaxy.webapps.tool_shed.util import common_util, container_util, workflow_util
import galaxy.tools
@@ -1709,16 +1709,16 @@
categories = suc.get_categories( trans )
selected_categories = [ rca.category_id for rca in repository.categories ]
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = review_util.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = review_util.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
+ can_browse_repository_reviews = review_util.can_browse_repository_reviews( trans, repository )
containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/tool_shed/repository/manage_repository.mako',
repo_name=repo_name,
@@ -2496,17 +2496,17 @@
message += malicious_error
status = 'error'
# Determine if the current changeset revision has been reviewed by the current user.
- reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = review_util.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = review_util.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
- can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
+ can_browse_repository_reviews = review_util.can_browse_repository_reviews( trans, repository )
return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako',
repo=repo,
repository=repository,
@@ -2590,12 +2590,12 @@
add_id_to_name=False,
downloadable=False )
trans.app.config.tool_data_path = original_tool_data_path
- reviewed_by_user = suc.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
+ reviewed_by_user = review_util.changeset_revision_reviewed_by_user( trans, trans.user, repository, changeset_revision )
if reviewed_by_user:
- review = suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) )
+ review = review_util.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) )
review_id = trans.security.encode_id( review.id )
else:
review_id = None
diff -r e40606362e962b4c452960c7c5f9b33b5b13f96c -r 74da2fbf09633268e1e6b6043bfd7e0515bd8422 lib/galaxy/webapps/tool_shed/controllers/repository_review.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
@@ -6,6 +6,7 @@
from galaxy.webapps.tool_shed.util import common_util
from galaxy.webapps.tool_shed.util.container_util import STRSEP
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import review_util
from galaxy.util.odict import odict
import tool_shed.grids.repository_review_grids as repository_review_grids
import tool_shed.grids.util as grids_util
@@ -36,7 +37,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
encoded_review_id = kwd[ 'id' ]
- review = suc.get_review( trans, encoded_review_id )
+ review = review_util.get_review( trans, encoded_review_id )
if kwd.get( 'approve_repository_review_button', False ):
approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
approved_select_field_value = str( kwd[ approved_select_field_name ] )
@@ -70,7 +71,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- review = suc.get_review( trans, kwd[ 'id' ] )
+ review = review_util.get_review( trans, kwd[ 'id' ] )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
@@ -108,7 +109,7 @@
if not name or not description:
message = 'Enter a valid name and a description'
status = 'error'
- elif suc.get_component_by_name( trans, name ):
+ elif review_util.get_component_by_name( trans, name ):
message = 'A component with that name already exists'
status = 'error'
else:
@@ -142,15 +143,15 @@
if changeset_revision:
# Make sure there is not already a review of the revision by the user.
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if suc.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- user_id=trans.security.encode_id( trans.user.id ) ):
+ if review_util.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
+ user_id=trans.security.encode_id( trans.user.id ) ):
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
# See if there are any reviews for previous changeset revisions that the user can copy.
- if not create_without_copying and not previous_review_id and suc.has_previous_repository_reviews( trans, repository, changeset_revision ):
+ if not create_without_copying and not previous_review_id and review_util.has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
action='select_previous_review',
**kwd ) )
@@ -168,7 +169,7 @@
trans.sa_session.add( review )
trans.sa_session.flush()
if previous_review_id:
- review_to_copy = suc.get_review( trans, previous_review_id )
+ review_to_copy = review_util.get_review( trans, previous_review_id )
self.copy_review( trans, review_to_copy, review )
review_id = trans.security.encode_id( review.id )
message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \
@@ -205,7 +206,7 @@
action='manage_categories',
message=message,
status='error' ) )
- component = suc.get_component( trans, id )
+ component = review_util.get_component( trans, id )
if params.get( 'edit_component_button', False ):
new_description = util.restore_text( params.get( 'description', '' ) ).strip()
if component.description != new_description:
@@ -231,9 +232,9 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
review_id = kwd.get( 'id', None )
- review = suc.get_review( trans, review_id )
+ review = review_util.get_review( trans, review_id )
components_dict = odict()
- for component in suc.get_components( trans ):
+ for component in review_util.get_components( trans ):
components_dict[ component.name ] = dict( component=component, component_review=None )
repository = review.repository
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
@@ -283,8 +284,8 @@
approved = str( v )
elif component_review_attr == 'rating':
rating = int( str( v ) )
- component = suc.get_component( trans, component_id )
- component_review = suc.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
+ component = review_util.get_component( trans, component_id )
+ component_review = review_util.get_component_review_by_repository_review_id_component_id( trans, review_id, component_id )
if component_review:
# See if the existing component review should be updated.
if component_review.comment != comment or \
@@ -481,7 +482,7 @@
rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
if changeset_revision in reviewed_revision_hashes:
# Find the review for this changeset_revision
- repository_reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ repository_reviews = review_util.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
# Determine if the current user can add a review to this revision.
can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
@@ -521,7 +522,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
- reviews = suc.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
+ reviews = review_util.get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision )
return trans.fill_template( '/webapps/tool_shed/repository_review/reviews_of_changeset_revision.mako',
repository=repository,
changeset_revision=changeset_revision,
@@ -541,7 +542,7 @@
if 'operation' in kwd:
operation = kwd['operation'].lower()
# The value of the received id is the encoded review id.
- review = suc.get_review( trans, kwd[ 'id' ] )
+ review = review_util.get_review( trans, kwd[ 'id' ] )
repository = review.repository
kwd[ 'id' ] = trans.security.encode_id( repository.id )
if operation == "inspect repository revisions":
@@ -587,7 +588,7 @@
repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
- previous_reviews_dict = suc.get_previous_repository_reviews( trans, repository, changeset_revision )
+ previous_reviews_dict = review_util.get_previous_repository_reviews( trans, repository, changeset_revision )
rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, changeset_revision )
return trans.fill_template( '/webapps/tool_shed/repository_review/select_previous_review.mako',
repository=repository,
diff -r e40606362e962b4c452960c7c5f9b33b5b13f96c -r 74da2fbf09633268e1e6b6043bfd7e0515bd8422 lib/tool_shed/util/review_util.py
--- /dev/null
+++ b/lib/tool_shed/util/review_util.py
@@ -0,0 +1,100 @@
+import os, logging
+from galaxy.util.odict import odict
+import tool_shed.util.shed_util_common as suc
+from galaxy.model.orm import and_
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
+
+log = logging.getLogger( __name__ )
+
+def can_browse_repository_reviews( trans, repository ):
+ """Determine if there are any reviews of the received repository for which the current user has permission to browse any component reviews."""
+ user = trans.user
+ if user:
+ for review in repository.reviews:
+ for component_review in review.component_reviews:
+ if trans.app.security_agent.user_can_browse_component_review( trans.app, repository, component_review, user ):
+ return True
+ return False
+
+def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
+ """Determine if the current changeset revision has been reviewed by the current user."""
+ for review in repository.reviews:
+ if review.changeset_revision == changeset_revision and review.user == user:
+ return True
+ return False
+
+def get_component( trans, id ):
+ """Get a component from the database."""
+ return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) )
+
+def get_component_review( trans, id ):
+ """Get a component_review from the database"""
+ return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) )
+
+def get_component_by_name( trans, name ):
+ """Get a component from the database via a name."""
+ return trans.sa_session.query( trans.app.model.Component ) \
+ .filter( trans.app.model.Component.table.c.name==name ) \
+ .first()
+
+def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ):
+ """Get a component_review from the database via repository_review_id and component_id."""
+ return trans.sa_session.query( trans.model.ComponentReview ) \
+ .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
+ trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
+ .first()
+
+def get_components( trans ):
+ return trans.sa_session.query( trans.app.model.Component ) \
+ .order_by( trans.app.model.Component.name ) \
+ .all()
+
+def get_previous_repository_reviews( trans, repository, changeset_revision ):
+ """Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+ previous_reviews_dict = odict()
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
+ previous_changeset_revision = str( repo.changectx( changeset ) )
+ if previous_changeset_revision in reviewed_revision_hashes:
+ previous_rev, previous_changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
+ revision_reviews = get_reviews_by_repository_id_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ previous_changeset_revision )
+ previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
+ reviews=revision_reviews )
+ return previous_reviews_dict
+
+def get_review( trans, id ):
+ """Get a repository_review from the database via id."""
+ return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
+
+def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
+ """Get a repository_review from the database via repository id, changeset_revision and user_id."""
+ return trans.sa_session.query( trans.model.RepositoryReview ) \
+ .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+ trans.model.RepositoryReview.changeset_revision == changeset_revision,
+ trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
+ .first()
+
+def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
+ """Get all repository_reviews from the database via repository id and changeset_revision."""
+ return trans.sa_session.query( trans.model.RepositoryReview ) \
+ .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
+ trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
+ .all()
+
+def has_previous_repository_reviews( trans, repository, changeset_revision ):
+ """Determine if a repository has a changeset revision review prior to the received changeset revision."""
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+ for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
+ previous_changeset_revision = str( repo.changectx( changeset ) )
+ if previous_changeset_revision in reviewed_revision_hashes:
+ return True
+ return False
diff -r e40606362e962b4c452960c7c5f9b33b5b13f96c -r 74da2fbf09633268e1e6b6043bfd7e0515bd8422 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -3,7 +3,6 @@
from time import gmtime, strftime
from galaxy import util
from galaxy.util import json
-from galaxy.util.odict import odict
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.datatypes import checkers
@@ -108,15 +107,6 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
-def can_browse_repository_reviews( trans, repository ):
- """Determine if there are any reviews of the received repository for which the current user has permission to browse any component reviews."""
- user = trans.user
- if user:
- for review in repository.reviews:
- for component_review in review.component_reviews:
- if trans.app.security_agent.user_can_browse_component_review( trans.app, repository, component_review, user ):
- return True
- return False
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -130,12 +120,6 @@
if changeset_revision == changeset_hash:
return True
return False
-def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
- """Determine if the current changeset revision has been reviewed by the current user."""
- for review in repository.reviews:
- if review.changeset_revision == changeset_revision and review.user == user:
- return True
- return False
def clean_repository_clone_url( repository_clone_url ):
if repository_clone_url.find( '@' ) > 0:
# We have an url that includes an authenticated user, something like:
@@ -393,27 +377,6 @@
if str( ctx ) == changeset_revision:
return ctx
return None
-def get_component( trans, id ):
- """Get a component from the database."""
- return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) )
-def get_component_by_name( trans, name ):
- """Get a component from the database via a name."""
- return trans.sa_session.query( trans.app.model.Component ) \
- .filter( trans.app.model.Component.table.c.name==name ) \
- .first()
-def get_component_review( trans, id ):
- """Get a component_review from the database"""
- return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) )
-def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ):
- """Get a component_review from the database via repository_review_id and component_id."""
- return trans.sa_session.query( trans.model.ComponentReview ) \
- .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
- trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
- .first()
-def get_components( trans ):
- return trans.sa_session.query( trans.app.model.Component ) \
- .order_by( trans.app.model.Component.name ) \
- .all()
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
@@ -574,21 +537,6 @@
return INITIAL_CHANGELOG_HASH
else:
previous_changeset_revision = changeset_revision
-def get_previous_repository_reviews( trans, repository, changeset_revision ):
- """Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- previous_reviews_dict = odict()
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
- previous_changeset_revision = str( repo.changectx( changeset ) )
- if previous_changeset_revision in reviewed_revision_hashes:
- previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
- revision_reviews = get_reviews_by_repository_id_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- previous_changeset_revision )
- previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
- reviews=revision_reviews )
- return previous_reviews_dict
def get_repo_info_tuple_contents( repo_info_tuple ):
# Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
if len( repo_info_tuple ) == 6:
@@ -723,22 +671,6 @@
for changeset in repo.changelog:
reversed_changelog.insert( 0, changeset )
return reversed_changelog
-def get_review( trans, id ):
- """Get a repository_review from the database via id."""
- return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
-def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
- """Get all repository_reviews from the database via repository id and changeset_revision."""
- return trans.sa_session.query( trans.model.RepositoryReview ) \
- .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
- trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
- .all()
-def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
- """Get a repository_review from the database via repository id, changeset_revision and user_id."""
- return trans.sa_session.query( trans.model.RepositoryReview ) \
- .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
- trans.model.RepositoryReview.changeset_revision == changeset_revision,
- trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
- .first()
def get_revision_label( trans, repository, changeset_revision ):
"""Return a string consisting of the human read-able changeset rev and the changeset revision string."""
repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
@@ -954,15 +886,6 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def has_previous_repository_reviews( trans, repository, changeset_revision ):
- """Determine if a repository has a changeset revision review prior to the received changeset revision."""
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
- previous_changeset_revision = str( repo.changectx( changeset ) )
- if previous_changeset_revision in reviewed_revision_hashes:
- return True
- return False
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
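A dependency-free sketch of the changeset_revision_reviewed_by_user predicate that was moved into review_util above; the Review and Repository classes here are hypothetical stand-ins for Galaxy's models, used only to illustrate the logic:

class Review(object):
    def __init__(self, changeset_revision, user):
        self.changeset_revision = changeset_revision
        self.user = user

class Repository(object):
    def __init__(self, reviews=None):
        self.reviews = reviews or []

def changeset_revision_reviewed_by_user(user, repository, changeset_revision):
    # True only if this user has already reviewed this changeset revision.
    for review in repository.reviews:
        if review.changeset_revision == changeset_revision and review.user == user:
            return True
    return False

repo = Repository(reviews=[Review('74da2fbf0963', 'greg')])
print(changeset_revision_reviewed_by_user('greg', repo, '74da2fbf0963'))           # True
print(changeset_revision_reviewed_by_user('someone_else', repo, '74da2fbf0963'))   # False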
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Additional refactoring of shed_util_common into appropriate tool shed Galaxy utility components.
by commits-noreply@bitbucket.org 16 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e40606362e96/
changeset: e40606362e96
user: greg
date: 2013-03-16 15:40:26
summary: Additional refactoring of shed_util_common into appropriate tool shed Galaxy utility components.
affected #: 12 files
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -7,7 +7,7 @@
from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
-from tool_shed.util import repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util
from tool_shed.galaxy_install import repository_util
from galaxy.webapps.tool_shed.util import workflow_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
@@ -1075,7 +1075,7 @@
name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
- readme_files_dict = suc.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
+ readme_files_dict = readme_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
# We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
@@ -1362,7 +1362,7 @@
includes_tool_dependencies = True
if 'workflows' in metadata:
includes_workflows = True
- readme_files_dict = suc.build_readme_files_dict( metadata )
+ readme_files_dict = readme_util.build_readme_files_dict( metadata )
tool_dependencies = metadata.get( 'tool_dependencies', None )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,9 +11,9 @@
from galaxy.util import json
from galaxy.model.orm import and_, or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, repository_dependency_util, tool_dependency_util
+from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, tool_dependency_util, tool_util
from tool_shed.galaxy_install import repository_util
-from galaxy.webapps.tool_shed.util import common_util, workflow_util
+from galaxy.webapps.tool_shed.util import common_util, container_util, workflow_util
import galaxy.tools
import tool_shed.grids.repository_grids as repository_grids
import tool_shed.grids.util as grids_util
@@ -638,7 +638,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository, tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
if message:
status = 'error'
tool_state = self.__new_state( trans )
@@ -1069,7 +1069,7 @@
changeset_revision = kwd[ 'changeset_revision' ]
repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- return suc.build_readme_files_dict( repository_metadata.metadata )
+ return readme_util.build_readme_files_dict( repository_metadata.metadata )
@web.json
def get_repository_dependencies( self, trans, **kwd ):
@@ -1289,7 +1289,7 @@
break
if 'workflows' in metadata:
includes_workflows = True
- readme_files_dict = suc.build_readme_files_dict( metadata )
+ readme_files_dict = readme_util.build_readme_files_dict( metadata )
# See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
for name, repo_info_tuple in repo_info_dict.items():
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
@@ -1461,18 +1461,18 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
- repository, tool, error_message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ repository, tool, error_message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
invalid_file_tups = []
if tool:
- invalid_file_tups = suc.check_tool_input_params( trans.app,
- repository.repo_path( trans.app ),
- tool_config,
- tool,
- [] )
+ invalid_file_tups = tool_util.check_tool_input_params( trans.app,
+ repository.repo_path( trans.app ),
+ tool_config,
+ tool,
+ [] )
if invalid_file_tups:
- message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
elif error_message:
message = error_message
try:
@@ -1719,7 +1719,7 @@
else:
review_id = None
can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
- containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/tool_shed/repository/manage_repository.mako',
repo_name=repo_name,
description=description,
@@ -1832,7 +1832,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/tool_shed/repository/preview_tools_in_changeset.mako',
repository=repository,
containers_dict=containers_dict,
@@ -1920,7 +1920,7 @@
invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
repository = suc.get_repository_in_tool_shed( trans, id )
- message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
message = "All repository metadata has been reset. "
@@ -2505,7 +2505,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako',
repo=repo,
@@ -2559,21 +2559,21 @@
guid = tool_metadata_dict[ 'guid' ]
full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
- can_use_disk_file = suc.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
+ can_use_disk_file = tool_util.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
if can_use_disk_file:
trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
+ tool, valid, message, sample_files = tool_util.handle_sample_files_and_load_tool_from_disk( trans,
repo_files_dir,
full_path_to_tool_config,
work_dir )
if message:
status = 'error'
else:
- tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans,
- repo,
- changeset_revision,
- tool_config_filename,
- work_dir )
+ tool, message, sample_files = tool_util.handle_sample_files_and_load_tool_from_tmp_config( trans,
+ repo,
+ changeset_revision,
+ tool_config_filename,
+ work_dir )
if message:
status = 'error'
break
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy import web, util
from galaxy.datatypes import checkers
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util
+from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util, tool_util
from galaxy import eggs
eggs.require('mercurial')
@@ -153,7 +153,7 @@
if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path )
+ error, error_message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, full_path )
if error:
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
@@ -213,7 +213,7 @@
message += invalid_repository_dependencies_message
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
id=repository_id,
@@ -223,7 +223,7 @@
else:
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( trans.app )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/tool_shed/repository/upload.mako',
repository=repository,
@@ -360,7 +360,7 @@
if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
# to the in-memory trans.app.tool_data_tables dictionary.
- error, message = suc.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -1,4 +1,5 @@
-import os, logging
+import os, logging, threading
+from tool_shed.util import readme_util
log = logging.getLogger( __name__ )
@@ -170,6 +171,34 @@
self.repository_metadata_id = repository_metadata_id
self.repository_id = repository_id
+def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ):
+ """Inspect all received tool dependencies and label those that are orphans within the repository."""
+ orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
+ new_tool_dependencies = {}
+ if tool_dependencies:
+ for td_key, requirements_dict in tool_dependencies.items():
+ if td_key in [ 'set_environment' ]:
+ # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
+ if orphan_env_dependencies:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ if set_environment_dict in orphan_env_dependencies:
+ set_environment_dict[ 'is_orphan' ] = True
+ else:
+ set_environment_dict[ 'is_orphan' ] = False
+ new_set_environment_dict_list.append( set_environment_dict )
+ new_tool_dependencies[ td_key ] = new_set_environment_dict_list
+ else:
+ new_tool_dependencies[ td_key ] = requirements_dict
+ else:
+ # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
+ if td_key in orphan_tool_dependencies:
+ requirements_dict[ 'is_orphan' ] = True
+ else:
+ requirements_dict[ 'is_orphan' ] = False
+ new_tool_dependencies[ td_key ] = requirements_dict
+ return new_tool_dependencies
+
def build_data_managers_folder( trans, folder_id, data_managers, label=None ):
"""Return a folder hierarchy containing Data Managers."""
if data_managers:
@@ -199,6 +228,7 @@
else:
data_managers_root_folder = None
return folder_id, data_managers_root_folder
+
def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
"""Return a folder hierarchy containing datatypes."""
if datatypes:
@@ -249,6 +279,7 @@
else:
datatypes_root_folder = None
return folder_id, datatypes_root_folder
+
def build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages=None, label=None ):
"""Return a folder hierarchy containing invalid Data Managers."""
if data_managers or error_messages:
@@ -285,6 +316,7 @@
else:
data_managers_root_folder = None
return folder_id, data_managers_root_folder
+
def build_invalid_repository_dependencies_root_folder( trans, folder_id, invalid_repository_dependencies_dict ):
"""Return a folder hierarchy containing invalid repository dependencies."""
label = 'Invalid repository dependencies'
@@ -320,6 +352,7 @@
else:
invalid_repository_dependencies_root_folder = None
return folder_id, invalid_repository_dependencies_root_folder
+
def build_invalid_tool_dependencies_root_folder( trans, folder_id, invalid_tool_dependencies_dict ):
"""Return a folder hierarchy containing invalid tool dependencies."""
# # INvalid tool dependencies are always packages like:
@@ -358,6 +391,7 @@
else:
invalid_tool_dependencies_root_folder = None
return folder_id, invalid_tool_dependencies_root_folder
+
def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
"""Return a folder hierarchy containing invalid tools."""
# TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
@@ -388,6 +422,7 @@
else:
invalid_tools_root_folder = None
return folder_id, invalid_tools_root_folder
+
def build_readme_files_folder( trans, folder_id, readme_files_dict, label='Readme files' ):
"""Return a folder hierarchy containing readme text files."""
if readme_files_dict:
@@ -415,6 +450,254 @@
else:
readme_files_root_folder = None
return folder_id, readme_files_root_folder
+
+def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies,
+ readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers,
+ invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ):
+ """Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
+ containers_dict = dict( datatypes=None,
+ invalid_tools=None,
+ missing_tool_dependencies=None,
+ readme_files=None,
+ repository_dependencies=None,
+ missing_repository_dependencies=None,
+ tool_dependencies=None,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None )
+ # Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository
+ # id. However we need to be careful because sometimes the repository object is None.
+ if repository:
+ repository_id = repository.id
+ changeset_revision = repository.changeset_revision
+ else:
+ repository_id = None
+ changeset_revision = None
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ folder_id = 0
+ # Datatypes container.
+ if datatypes:
+ folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
+ containers_dict[ 'datatypes' ] = datatypes_root_folder
+ # Invalid tools container.
+ if invalid_tools:
+ folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
+ folder_id,
+ invalid_tools,
+ changeset_revision,
+ repository=repository,
+ label='Invalid tools' )
+ containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+ # Readme files container.
+ if readme_files_dict:
+ folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
+ containers_dict[ 'readme_files' ] = readme_files_root_folder
+ # Installed repository dependencies container.
+ if repository_dependencies:
+ if new_install:
+ label = 'Repository dependencies'
+ else:
+ label = 'Installed repository dependencies'
+ folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies,
+ label=label,
+ installed=True )
+ containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+ # Missing repository dependencies container.
+ if missing_repository_dependencies:
+ folder_id, missing_repository_dependencies_root_folder = \
+ build_repository_dependencies_folder( trans=trans,
+ folder_id=folder_id,
+ repository_dependencies=missing_repository_dependencies,
+ label='Missing repository dependencies',
+ installed=False )
+ containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
+ # Installed tool dependencies container.
+ if tool_dependencies:
+ if new_install:
+ label = 'Tool dependencies'
+ else:
+ label = 'Installed tool dependencies'
+ # We only want to display the Status column if the tool_dependency is missing.
+ folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+ folder_id,
+ tool_dependencies,
+ label=label,
+ missing=False,
+ new_install=new_install,
+ reinstalling=reinstalling )
+ containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+ # Missing tool dependencies container.
+ if missing_tool_dependencies:
+ # We only want to display the Status column if the tool_dependency is missing.
+ folder_id, missing_tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+ folder_id,
+ missing_tool_dependencies,
+ label='Missing tool dependencies',
+ missing=True,
+ new_install=new_install,
+ reinstalling=reinstalling )
+ containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
+ # Valid tools container.
+ if valid_tools:
+ folder_id, valid_tools_root_folder = build_tools_folder( trans,
+ folder_id,
+ valid_tools,
+ repository,
+ changeset_revision,
+ label='Valid tools' )
+ containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+ # Workflows container.
+ if workflows:
+ folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
+ folder_id=folder_id,
+ workflows=workflows,
+ repository_metadata_id=None,
+ repository_id=repository_id,
+ label='Workflows' )
+ containers_dict[ 'workflows' ] = workflows_root_folder
+ if valid_data_managers:
+ folder_id, valid_data_managers_root_folder = build_data_managers_folder( trans=trans,
+ folder_id=folder_id,
+ data_managers=valid_data_managers,
+ label='Valid Data Managers' )
+ containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
+ if invalid_data_managers or data_managers_errors:
+ folder_id, invalid_data_managers_root_folder = build_invalid_data_managers_folder( trans=trans,
+ folder_id=folder_id,
+ data_managers=invalid_data_managers,
+ error_messages=data_managers_errors,
+ label='Invalid Data Managers' )
+ containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
+ except Exception, e:
+ log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
+ finally:
+ lock.release()
+ return containers_dict
+
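For orientation, a minimal, hypothetical invocation of the helper above (referenced as container_util.build_repository_containers_for_galaxy at the call sites updated later in this changeset) might look like the sketch below; trans is assumed to be an active transaction, and everything not yet known is passed as None.

# Hypothetical sketch only -- not part of the changeset.
from galaxy.webapps.tool_shed.util import container_util

containers_dict = container_util.build_repository_containers_for_galaxy(
    trans=trans,                    # assumed active transaction
    repository=None,                # nothing installed yet
    datatypes=None,
    invalid_tools=None,
    missing_repository_dependencies=missing_repository_dependencies,  # assumed to exist in the caller's scope
    missing_tool_dependencies=None,
    readme_files_dict=readme_files_dict,                               # assumed to exist in the caller's scope
    repository_dependencies=None,
    tool_dependencies=None,
    valid_tools=None,
    workflows=None,
    valid_data_managers=None,
    invalid_data_managers=None,
    data_managers_errors=None,
    new_install=True,
    reinstalling=False )
# Only the keys backed by supplied data ('readme_files' and
# 'missing_repository_dependencies' here) point at folder objects; every
# other value in containers_dict stays None.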
+def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
+ """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
+ containers_dict = dict( datatypes=None,
+ invalid_tools=None,
+ readme_files=None,
+ repository_dependencies=None,
+ tool_dependencies=None,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None
+ )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ folder_id = 0
+ # Datatypes container.
+ if metadata:
+ if 'datatypes' in metadata:
+ datatypes = metadata[ 'datatypes' ]
+ folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
+ containers_dict[ 'datatypes' ] = datatypes_root_folder
+ # Invalid repository dependencies container.
+ if metadata:
+ if 'invalid_repository_dependencies' in metadata:
+ invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
+ folder_id, invalid_repository_dependencies_root_folder = \
+ build_invalid_repository_dependencies_root_folder( trans,
+ folder_id,
+ invalid_repository_dependencies )
+ containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
+ # Invalid tool dependencies container.
+ if metadata:
+ if 'invalid_tool_dependencies' in metadata:
+ invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
+ folder_id, invalid_tool_dependencies_root_folder = \
+ build_invalid_tool_dependencies_root_folder( trans,
+ folder_id,
+ invalid_tool_dependencies )
+ containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
+ # Invalid tools container.
+ if metadata:
+ if 'invalid_tools' in metadata:
+ invalid_tool_configs = metadata[ 'invalid_tools' ]
+ folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
+ folder_id,
+ invalid_tool_configs,
+ changeset_revision,
+ repository=repository,
+ label='Invalid tools' )
+ containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+ # Readme files container.
+ if metadata:
+ if 'readme_files' in metadata:
+ readme_files_dict = readme_util.build_readme_files_dict( metadata )
+ folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
+ containers_dict[ 'readme_files' ] = readme_files_root_folder
+ # Repository dependencies container.
+ folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies,
+ label='Repository dependencies',
+ installed=False )
+ if repository_dependencies_root_folder:
+ containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+ # Tool dependencies container.
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ tool_dependencies = metadata[ 'tool_dependencies' ]
+ if trans.webapp.name == 'tool_shed':
+ if 'orphan_tool_dependencies' in metadata:
+ orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
+ tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
+ folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+ folder_id,
+ tool_dependencies,
+ missing=False,
+ new_install=False )
+ containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+ # Valid tools container.
+ if metadata:
+ if 'tools' in metadata:
+ valid_tools = metadata[ 'tools' ]
+ folder_id, valid_tools_root_folder = build_tools_folder( trans,
+ folder_id,
+ valid_tools,
+ repository,
+ changeset_revision,
+ label='Valid tools' )
+ containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+ # Workflows container.
+ if metadata:
+ if 'workflows' in metadata:
+ workflows = metadata[ 'workflows' ]
+ folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
+ folder_id=folder_id,
+ workflows=workflows,
+ repository_metadata_id=repository_metadata.id,
+ repository_id=None,
+ label='Workflows' )
+ containers_dict[ 'workflows' ] = workflows_root_folder
+ # Valid Data Managers container
+ if metadata:
+ if 'data_manager' in metadata:
+ data_managers = metadata['data_manager'].get( 'data_managers', None )
+ folder_id, data_managers_root_folder = build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
+ containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
+ error_messages = metadata['data_manager'].get( 'error_messages', None )
+ data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+ folder_id, data_managers_root_folder = build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
+ containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
+
+ except Exception, e:
+ log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
+ finally:
+ lock.release()
+ return containers_dict
+
def build_repository_dependencies_folder( trans, folder_id, repository_dependencies, label='Repository dependencies', installed=False ):
"""Return a folder hierarchy containing repository dependencies."""
if repository_dependencies:
@@ -438,6 +721,7 @@
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
+
def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
"""Return a folder hierarchy containing valid tools."""
if tool_dicts:
@@ -494,6 +778,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
+
def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
"""Return a folder hierarchy containing tool dependencies."""
# When we're in Galaxy (not the tool shed) and the tool dependencies are not installed or are in an error state, they are considered missing. The tool
@@ -603,6 +888,7 @@
else:
tool_dependencies_root_folder = None
return folder_id, tool_dependencies_root_folder
+
def build_workflows_folder( trans, folder_id, workflows, repository_metadata_id=None, repository_id=None, label='Workflows' ):
"""
Return a folder hierarchy containing workflow objects for each workflow dictionary in the received workflows list. When
@@ -646,6 +932,7 @@
else:
workflows_root_folder = None
return folder_id, workflows_root_folder
+
def cast_empty_repository_dependency_folders( folder, repository_dependency_id ):
"""
Change any empty folders contained within the repository dependencies container into a repository dependency since it has no repository dependencies
@@ -660,6 +947,7 @@
for sub_folder in folder.folders:
return cast_empty_repository_dependency_folders( sub_folder, repository_dependency_id )
return folder, repository_dependency_id
+
def generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key ):
"""Return a repository dependency label based on the repository dependency key."""
if key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
@@ -667,6 +955,7 @@
else:
label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( repository_name, changeset_revision, repository_owner )
return label
+
def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision ):
# FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
return '%s%s%s%s%s%s%s' % ( str( toolshed_base_url ).rstrip( '/' ),
@@ -676,14 +965,17 @@
str( repository_owner ),
STRSEP,
str( changeset_revision ) )
+
def generate_tool_dependencies_key( name, version, type ):
return '%s%s%s%s%s' % ( str( name ), STRSEP, str( version ), STRSEP, str( type ) )
+
def get_folder( folder, key ):
if folder.key == key:
return folder
for sub_folder in folder.folders:
return get_folder( sub_folder, key )
return None
+
def get_components_from_key( key ):
# FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
items = key.split( STRSEP )
@@ -692,6 +984,7 @@
repository_owner = items[ 2 ]
changeset_revision = items[ 3 ]
return toolshed_base_url, repository_name, repository_owner, changeset_revision
+
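The two key helpers above are each other's inverse; a short illustrative round trip (repository values are made up, STRSEP is the module-level separator string they share):

# Hypothetical sketch only -- not part of the changeset.
key = generate_repository_dependencies_key_for_repository( 'http://toolshed.example.org/',
                                                            'example_repo',
                                                            'example_owner',
                                                            '0123456789ab' )
toolshed_base_url, name, owner, changeset_revision = get_components_from_key( key )
# The base URL comes back without its trailing slash; the other three
# components round-trip unchanged.
assert ( name, owner, changeset_revision ) == ( 'example_repo', 'example_owner', '0123456789ab' )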
def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
folder = get_folder( repository_dependencies_folder, rd_key )
@@ -744,6 +1037,7 @@
# Insert the repository_dependency into the folder.
sub_folder.repository_dependencies.append( repository_dependency )
return repository_dependencies_folder, folder_id, repository_dependency_id
+
def is_subfolder_of( folder, repository_dependency ):
toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision )
@@ -751,15 +1045,18 @@
if key == sub_folder.key:
return True
return False
+
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
+
def populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
folder_keys = repository_dependencies.keys()
for key, value in repository_dependencies.items():
repository_dependencies_folder, folder_id, repository_dependency_id = \
handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
return repository_dependencies_folder, folder_id, repository_dependency_id
+
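The repository_dependencies dictionaries handled above (and in get_required_repo_info_dicts later in this changeset) share a simple shape: besides the 'root_key' and 'description' bookkeeping entries, each key is a STRSEP-joined coordinate string and each value is a list of [ toolshed, name, owner, changeset_revision ] lists. A hypothetical instance, with made-up coordinates:

# Hypothetical sketch only -- not part of the changeset.
root_key = generate_repository_dependencies_key_for_repository( 'http://toolshed.example.org',
                                                                 'top_repo', 'owner_a', 'aaaaaaaaaaaa' )
repository_dependencies = {
    'root_key': root_key,
    'description': 'Example description of the top-level repository.',
    root_key: [ [ 'http://toolshed.example.org', 'dep_repo', 'owner_b', 'bbbbbbbbbbbb' ] ],
}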
def print_folders( pad, folder ):
# For debugging...
pad_str = ''
@@ -770,6 +1067,7 @@
print ' %s%s' % ( pad_str, repository_dependency.listify )
for sub_folder in folder.folders:
print_folders( pad+5, sub_folder )
+
def prune_repository_dependencies( folder ):
"""
Since the object used to generate a repository dependencies container is a dictionary and not an odict() (it must be json-serialize-able), the
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/util/workflow_util.py
--- a/lib/galaxy/webapps/tool_shed/util/workflow_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/workflow_util.py
@@ -6,7 +6,7 @@
import logging, svgfig
from galaxy.util import json
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util
+from tool_shed.util import encoding_util, metadata_util, tool_util
from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
import galaxy.webapps.galaxy.controllers.workflow
import galaxy.tools
@@ -52,7 +52,7 @@
if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
if trans.webapp.name == 'tool_shed':
# We're in the tool shed.
- repository, self.tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+ repository, self.tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
if message and self.tool is None:
self.errors = 'unavailable'
break
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -203,22 +203,22 @@
repository_missing_tool_dependencies=missing_tool_dependencies,
required_repo_info_dicts=None )
# Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- repository=None,
- datatypes=None,
- invalid_tools=None,
- missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=installed_repository_dependencies,
- tool_dependencies=installed_tool_dependencies,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None,
- invalid_data_managers=None,
- data_managers_errors=None,
- new_install=True,
- reinstalling=False )
+ containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
+ new_install=True,
+ reinstalling=False )
# Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
# Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -1,8 +1,9 @@
-import os, logging
+import os, logging, urllib2
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import data_manager_util, datatype_util, tool_util
+from tool_shed.util import encoding_util, data_manager_util, datatype_util, tool_util
from tool_shed.galaxy_install.tool_dependencies.install_util import install_package, set_environment
from galaxy import util
+from galaxy.util import json
from galaxy.webapps.tool_shed.util import container_util
from galaxy import eggs
@@ -78,7 +79,7 @@
else:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = suc.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ required_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
# Display tool dependencies defined for each of the repository dependencies.
if required_repo_info_dicts:
all_tool_dependencies = {}
@@ -234,6 +235,55 @@
missing_tool_dependencies = None
return tool_dependencies, missing_tool_dependencies
+def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
+ """
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
+    repository_dependencies entries in each of the received repo_info_dicts include all required repositories, so only one pass through
+ this method is required to retrieve all repository dependencies.
+ """
+ all_repo_info_dicts = []
+ if repo_info_dicts:
+ # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
+ required_repository_tups = []
+ for repo_info_dict in repo_info_dicts:
+ for repository_name, repo_info_tup in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tup )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+ components_list = [ toolshed, name, owner, changeset_revision ]
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ for components_list in val:
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ if required_repository_tups:
+ # The value of required_repository_tups is a list of tuples, so we need to encode it.
+ encoded_required_repository_tups = []
+ for required_repository_tup in required_repository_tups:
+ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+ encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+ encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ required_repo_info_dict = json.from_json_string( text )
+ required_repo_info_dicts = []
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ if required_repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ return all_repo_info_dicts
+
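The request assembled above packs a list of repository coordinate tuples into a single URL parameter; a condensed, hypothetical restatement of that wire format (coordinate values are made up, tool_shed_url is assumed to exist in the caller's scope, and the separator and encoding helpers come from tool_shed.util.encoding_util):

# Hypothetical sketch only -- not part of the changeset.
required_repository_tups = [ [ 'http://toolshed.example.org', 'repo_a', 'owner_a', '1a2b3c4d5e6f' ],
                             [ 'http://toolshed.example.org', 'repo_b', 'owner_b', 'f6e5d4c3b2a1' ] ]
encoded_tups = [ encoding_util.encoding_sep.join( tup ) for tup in required_repository_tups ]
encoded_str = encoding_util.tool_shed_encode( encoding_util.encoding_sep2.join( encoded_tups ) )
url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_str )
# The tool shed answers with JSON whose 'repo_info_dicts' member is a list of
# tool_shed_encode()d dictionaries, which the function above decodes one by one.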
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -5,7 +5,7 @@
from galaxy.datatypes import checkers
from galaxy.tools.data_manager.manager import DataManager
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_install_util, tool_dependency_util
+from tool_shed.util import common_install_util, readme_util, tool_dependency_util, tool_util
from galaxy.model.orm import and_
from galaxy import eggs
@@ -417,7 +417,7 @@
# Parse the tool_config to get the guid.
tool_config_path = suc.get_config_from_disk( tool_config, repository_files_dir )
full_path = os.path.abspath( tool_config_path )
- tool, valid, error_message = suc.load_tool_from_config( app, full_path )
+ tool, valid, error_message = tool_util.load_tool_from_config( app, full_path )
if tool is None:
guid = None
else:
@@ -533,7 +533,7 @@
metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
# Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
for sample_file in sample_file_copy_paths:
- suc.copy_sample_file( app, sample_file, dest_path=work_dir )
+ tool_util.copy_sample_file( app, sample_file, dest_path=work_dir )
            # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
relative_path, filename = os.path.split( sample_file )
if filename == 'tool_data_table_conf.xml.sample':
@@ -578,13 +578,13 @@
log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
is_tool = False
if is_tool:
- tool, valid, error_message = suc.load_tool_from_config( app, full_path )
+ tool, valid, error_message = tool_util.load_tool_from_config( app, full_path )
if tool is None:
if not valid:
invalid_tool_configs.append( name )
invalid_file_tups.append( ( name, error_message ) )
else:
- invalid_files_and_errors_tups = suc.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
+ invalid_files_and_errors_tups = tool_util.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -1370,7 +1370,7 @@
response.close()
readme_files_dict = json.from_json_string( raw_text )
else:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+ readme_files_dict = readme_util.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
# Handle repository dependencies.
@@ -1403,22 +1403,22 @@
valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
data_managers_errors = metadata['data_manager'].get( 'messages', None )
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- repository=repository,
- datatypes=datatypes,
- invalid_tools=invalid_tools,
- missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=installed_repository_dependencies,
- tool_dependencies=installed_tool_dependencies,
- valid_tools=valid_tools,
- workflows=workflows,
- valid_data_managers=valid_data_managers,
- invalid_data_managers=invalid_data_managers,
- data_managers_errors=data_managers_errors,
- new_install=False,
- reinstalling=reinstalling )
+ containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
+ repository=repository,
+ datatypes=datatypes,
+ invalid_tools=invalid_tools,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=valid_tools,
+ workflows=workflows,
+ valid_data_managers=valid_data_managers,
+ invalid_data_managers=invalid_data_managers,
+ data_managers_errors=data_managers_errors,
+ new_install=False,
+ reinstalling=reinstalling )
else:
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -1583,7 +1583,7 @@
# Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
reset_all_tool_versions( trans, id, repo )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( trans.app )
return invalid_file_tups, metadata_dict
def reset_metadata_on_selected_repositories( trans, **kwd ):
@@ -1608,7 +1608,7 @@
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
if invalid_file_tups:
- message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
log.debug( message )
unsuccessful_count += 1
else:
@@ -1733,10 +1733,10 @@
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- suc.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( trans.app )
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/readme_util.py
--- /dev/null
+++ b/lib/tool_shed/util/readme_util.py
@@ -0,0 +1,40 @@
+import os, logging, urllib2
+import tool_shed.util.shed_util_common as suc
+from galaxy.util import json
+
+log = logging.getLogger( __name__ )
+
+def build_readme_files_dict( metadata, tool_path=None ):
+ """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
+ readme_files_dict = {}
+ if metadata:
+ if 'readme_files' in metadata:
+ for relative_path_to_readme_file in metadata[ 'readme_files' ]:
+ readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
+ if tool_path:
+ full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
+ else:
+ full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
+ try:
+ f = open( full_path_to_readme_file, 'r' )
+ text = f.read()
+ f.close()
+ readme_files_dict[ readme_file_name ] = suc.translate_string( text, to_html=False )
+ except Exception, e:
+ log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
+ return readme_files_dict
+
+def get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ):
+ """Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section selection page."""
+ name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # Handle README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = json.from_json_string( raw_text )
+ return readme_files_dict
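As a quick orientation for the new module: build_readme_files_dict works from a metadata dictionary and the local file system, while get_readme_files_dict_for_display asks the tool shed over HTTP. A small, hypothetical use of the local variant (metadata values and the tool path are made up):

# Hypothetical sketch only -- not part of the changeset.
from tool_shed.util import readme_util

metadata = { 'readme_files': [ 'readme.txt', 'docs/README_TOOLS' ] }
readme_files_dict = readme_util.build_readme_files_dict( metadata, tool_path='/shed_tools/example_repo' )
# The result maps the bare file names ('readme.txt', 'README_TOOLS') to their
# contents; files that cannot be read are logged at debug level and skipped.
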
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -104,7 +104,7 @@
filtered_repo_info_dicts = []
# Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies we have
# to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
- all_repo_info_dicts = suc.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ all_repo_info_dicts = common_install_util.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
if not all_repo_info_dicts:
# No repository dependencies were discovered so process the received repositories.
all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1,18 +1,14 @@
-import os, shutil, tempfile, logging, string, threading, urllib2, filecmp
+import os, shutil, tempfile, logging, string, urllib2
from datetime import datetime
from time import gmtime, strftime
from galaxy import util
-from galaxy.tools import parameters
from galaxy.util import json
from galaxy.util.odict import odict
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
-from galaxy.webapps.tool_shed.util import container_util
from galaxy.datatypes import checkers
from galaxy.model.orm import and_
import sqlalchemy.orm.exc
-from galaxy.tools.parameters import dynamic_options
-from tool_shed.util import encoding_util
from galaxy import eggs
import pkg_resources
@@ -89,297 +85,6 @@
'${host}'
"""
-def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ):
- """Inspect all received tool dependencies and label those that are orphans within the repository."""
- orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
- new_tool_dependencies = {}
- if tool_dependencies:
- for td_key, requirements_dict in tool_dependencies.items():
- if td_key in [ 'set_environment' ]:
- # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
- if orphan_env_dependencies:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- if set_environment_dict in orphan_env_dependencies:
- set_environment_dict[ 'is_orphan' ] = True
- else:
- set_environment_dict[ 'is_orphan' ] = False
- new_set_environment_dict_list.append( set_environment_dict )
- new_tool_dependencies[ td_key ] = new_set_environment_dict_list
- else:
- new_tool_dependencies[ td_key ] = requirements_dict
- else:
- # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
- if td_key in orphan_tool_dependencies:
- requirements_dict[ 'is_orphan' ] = True
- else:
- requirements_dict[ 'is_orphan' ] = False
- new_tool_dependencies[ td_key ] = requirements_dict
- return new_tool_dependencies
-def build_readme_files_dict( metadata, tool_path=None ):
- """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
- readme_files_dict = {}
- if metadata:
- if 'readme_files' in metadata:
- for relative_path_to_readme_file in metadata[ 'readme_files' ]:
- readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
- if tool_path:
- full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
- else:
- full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
- try:
- f = open( full_path_to_readme_file, 'r' )
- text = f.read()
- f.close()
- readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False )
- except Exception, e:
- log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
- return readme_files_dict
-def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies,
- readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers,
- invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ):
- """Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- missing_tool_dependencies=None,
- readme_files=None,
- repository_dependencies=None,
- missing_repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None,
- invalid_data_managers=None )
- # Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository
- # id. However we need to be careful because sometimes the repository object is None.
- if repository:
- repository_id = repository.id
- changeset_revision = repository.changeset_revision
- else:
- repository_id = None
- changeset_revision = None
- lock = threading.Lock()
- lock.acquire( True )
- try:
- folder_id = 0
- # Datatypes container.
- if datatypes:
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
- containers_dict[ 'datatypes' ] = datatypes_root_folder
- # Invalid tools container.
- if invalid_tools:
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
- folder_id,
- invalid_tools,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
- containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
- # Readme files container.
- if readme_files_dict:
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
- containers_dict[ 'readme_files' ] = readme_files_root_folder
- # Installed repository dependencies container.
- if repository_dependencies:
- if new_install:
- label = 'Repository dependencies'
- else:
- label = 'Installed repository dependencies'
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies,
- label=label,
- installed=True )
- containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Missing repository dependencies container.
- if missing_repository_dependencies:
- folder_id, missing_repository_dependencies_root_folder = \
- container_util.build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=missing_repository_dependencies,
- label='Missing repository dependencies',
- installed=False )
- containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
- # Installed tool dependencies container.
- if tool_dependencies:
- if new_install:
- label = 'Tool dependencies'
- else:
- label = 'Installed tool dependencies'
- # We only want to display the Status column if the tool_dependency is missing.
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
- folder_id,
- tool_dependencies,
- label=label,
- missing=False,
- new_install=new_install,
- reinstalling=reinstalling )
- containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
- # Missing tool dependencies container.
- if missing_tool_dependencies:
- # We only want to display the Status column if the tool_dependency is missing.
- folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
- folder_id,
- missing_tool_dependencies,
- label='Missing tool dependencies',
- missing=True,
- new_install=new_install,
- reinstalling=reinstalling )
- containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
- # Valid tools container.
- if valid_tools:
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
- folder_id,
- valid_tools,
- repository,
- changeset_revision,
- label='Valid tools' )
- containers_dict[ 'valid_tools' ] = valid_tools_root_folder
- # Workflows container.
- if workflows:
- folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans,
- folder_id=folder_id,
- workflows=workflows,
- repository_metadata_id=None,
- repository_id=repository_id,
- label='Workflows' )
- containers_dict[ 'workflows' ] = workflows_root_folder
- if valid_data_managers:
- folder_id, valid_data_managers_root_folder = container_util.build_data_managers_folder( trans=trans,
- folder_id=folder_id,
- data_managers=valid_data_managers,
- label='Valid Data Managers' )
- containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
- if invalid_data_managers or data_managers_errors:
- folder_id, invalid_data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans=trans,
- folder_id=folder_id,
- data_managers=invalid_data_managers,
- error_messages=data_managers_errors,
- label='Invalid Data Managers' )
- containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
- except Exception, e:
- log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
- finally:
- lock.release()
- return containers_dict
-def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
- """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- readme_files=None,
- repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None
- )
- if repository_metadata:
- metadata = repository_metadata.metadata
- lock = threading.Lock()
- lock.acquire( True )
- try:
- folder_id = 0
- # Datatypes container.
- if metadata:
- if 'datatypes' in metadata:
- datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
- containers_dict[ 'datatypes' ] = datatypes_root_folder
- # Invalid repository dependencies container.
- if metadata:
- if 'invalid_repository_dependencies' in metadata:
- invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
- folder_id, invalid_repository_dependencies_root_folder = \
- container_util.build_invalid_repository_dependencies_root_folder( trans,
- folder_id,
- invalid_repository_dependencies )
- containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
- # Invalid tool dependencies container.
- if metadata:
- if 'invalid_tool_dependencies' in metadata:
- invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
- folder_id, invalid_tool_dependencies_root_folder = \
- container_util.build_invalid_tool_dependencies_root_folder( trans,
- folder_id,
- invalid_tool_dependencies )
- containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
- # Invalid tools container.
- if metadata:
- if 'invalid_tools' in metadata:
- invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
- folder_id,
- invalid_tool_configs,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
- containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
- # Readme files container.
- if metadata:
- if 'readme_files' in metadata:
- readme_files_dict = build_readme_files_dict( metadata )
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
- containers_dict[ 'readme_files' ] = readme_files_root_folder
- # Repository dependencies container.
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies,
- label='Repository dependencies',
- installed=False )
- if repository_dependencies_root_folder:
- containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Tool dependencies container.
- if metadata:
- if 'tool_dependencies' in metadata:
- tool_dependencies = metadata[ 'tool_dependencies' ]
- if trans.webapp.name == 'tool_shed':
- if 'orphan_tool_dependencies' in metadata:
- orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
- tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
- folder_id,
- tool_dependencies,
- missing=False,
- new_install=False )
- containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
- # Valid tools container.
- if metadata:
- if 'tools' in metadata:
- valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
- folder_id,
- valid_tools,
- repository,
- changeset_revision,
- label='Valid tools' )
- containers_dict[ 'valid_tools' ] = valid_tools_root_folder
- # Workflows container.
- if metadata:
- if 'workflows' in metadata:
- workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans,
- folder_id=folder_id,
- workflows=workflows,
- repository_metadata_id=repository_metadata.id,
- repository_id=None,
- label='Workflows' )
- containers_dict[ 'workflows' ] = workflows_root_folder
- # Valid Data Managers container
- if metadata:
- if 'data_manager' in metadata:
- data_managers = metadata['data_manager'].get( 'data_managers', None )
- folder_id, data_managers_root_folder = container_util.build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
- containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
- error_messages = metadata['data_manager'].get( 'error_messages', None )
- data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
- folder_id, data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
- containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
-
- except Exception, e:
- log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
- finally:
- lock.release()
- return containers_dict
def build_repository_ids_select_field( trans, name='repository_ids', multiple=True, display='checkboxes' ):
"""Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
@@ -412,25 +117,6 @@
if trans.app.security_agent.user_can_browse_component_review( trans.app, repository, component_review, user ):
return True
return False
-def can_use_tool_config_disk_file( trans, repository, repo, file_path, changeset_revision ):
- """
- Determine if repository's tool config file on disk can be used. This method is restricted to tool config files since, with the
- exception of tool config files, multiple files with the same name will likely be in various directories in the repository and we're
- comparing file names only (not relative paths).
- """
- if not file_path or not os.path.exists( file_path ):
- # The file no longer exists on disk, so it must have been deleted at some previous point in the change log.
- return False
- if changeset_revision == repository.tip( trans.app ):
- return True
- file_name = strip_path( file_path )
- latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
- can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
- try:
- os.unlink( latest_version_of_file )
- except:
- pass
- return can_use_disk_file
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -450,50 +136,6 @@
if review.changeset_revision == changeset_revision and review.user == user:
return True
return False
-def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
- """
- Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist.
- """
- invalid_files_and_errors_tups = []
- correction_msg = ''
- for input_param in tool.input_params:
- if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
- # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
- options = input_param.dynamic_options or input_param.options
- if options and isinstance( options, dynamic_options.DynamicOptions ):
- if options.tool_data_table or options.missing_tool_data_table_name:
- # Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
- if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
- if error:
- invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- else:
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
- correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- if options.index_file or options.missing_index_file:
- # Make sure the repository contains the required xxx.loc.sample file.
- index_file = options.index_file or options.missing_index_file
- index_file_name = strip_path( index_file )
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == '%s.sample' % index_file_name:
- options.index_file = index_file_name
- options.missing_index_file = None
- if options.tool_data_table:
- options.tool_data_table.missing_index_file = None
- sample_found = True
- break
- if not sample_found:
- correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file_name )
- correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- return invalid_files_and_errors_tups
def clean_repository_clone_url( repository_clone_url ):
if repository_clone_url.find( '@' ) > 0:
# We have an url that includes an authenticated user, something like:
@@ -527,17 +169,6 @@
error_message = 'Error cloning repository: %s' % str( e )
log.debug( error_message )
return False, error_message
-def concat_messages( msg1, msg2 ):
- if msg1:
- if msg2:
- message = '%s %s' % ( msg1, msg2 )
- else:
- message = msg1
- elif msg2:
- message = msg2
- else:
- message = ''
- return message
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -549,17 +180,6 @@
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
os.chmod( config_filename, 0644 )
-def copy_disk_sample_files_to_dir( trans, repo_files_dir, dest_path ):
- """Copy all files currently on disk that end with the .sample extension to the directory to which dest_path refers."""
- sample_files = []
- for root, dirs, files in os.walk( repo_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- relative_path = os.path.join( root, name )
- copy_sample_file( trans.app, relative_path, dest_path=dest_path )
- sample_files.append( name )
- return sample_files
def copy_file_from_manifest( repo, ctx, filename, dir ):
"""Copy the latest version of the file named filename from the repository manifest to the directory to which dir refers."""
for changeset in reversed_upper_bounded_changelog( repo, ctx ):
@@ -572,21 +192,6 @@
fh.close()
return file_path
return None
-def copy_sample_file( app, filename, dest_path=None ):
- """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
- if dest_path is None:
- dest_path = os.path.abspath( app.config.tool_data_path )
- sample_file_name = strip_path( filename )
- copied_file = sample_file_name.replace( '.sample', '' )
- full_source_path = os.path.abspath( filename )
- full_destination_path = os.path.join( dest_path, sample_file_name )
- # Don't copy a file to itself - not sure how this happens, but sometimes it does...
- if full_source_path != full_destination_path:
- # It's ok to overwrite the .sample version of the file.
- shutil.copy( full_source_path, full_destination_path )
- # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
- if not os.path.exists( os.path.join( dest_path, copied_file ) ):
- shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -642,20 +247,6 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
-def ensure_required_repositories_exist_for_reinstall( trans, repository_dependencies ):
- """
- Inspect the received repository_dependencies dictionary and make sure tool_shed_repository objects exist in the database for each entry. These
- tool_shed_repositories do not necessarily have to exist on disk, and if they do not, their status will be marked appropriately. They must exist
- in the database in order for repository dependency relationships to be properly built.
- """
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- tool_shed, name, owner, changeset_revision = container_util.get_components_from_key( key )
- repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision )
- for repository_components_list in val:
- tool_shed, name, owner, changeset_revision = repository_components_list
- repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision )
def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( app, repository )
@@ -675,42 +266,6 @@
toolshed, name, owner, changeset_revision = repo_info_tup
# Don't include the changeset_revision in clone urls.
return url_join( toolshed, 'repos', owner, name )
-def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
- if as_html:
- new_line = '<br/>'
- bold_start = '<b>'
- bold_end = '</b>'
- else:
- new_line = '\n'
- bold_start = ''
- bold_end = ''
- message = ''
- if not displaying_invalid_tool:
- if metadata_dict:
- message += "Metadata may have been defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
- message += "Correct the following problems if necessary and reset metadata.%s" % new_line
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
- correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
- else:
- if as_html:
- correction_msg = exception_msg
- else:
- correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
- message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
- return message
def generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -925,64 +480,6 @@
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
-def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ):
- """
- Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision.
- This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files,
- multiple files will have the same name in various directories within the repository.
- """
- stripped_filename = strip_path( filename )
- for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
- manifest_ctx = repo.changectx( changeset )
- for ctx_file in manifest_ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if ctx_file_name == stripped_filename:
- try:
- fctx = manifest_ctx[ ctx_file ]
- except LookupError:
- # The ctx_file may have been moved in the change set. For example, 'ncbi_blastp_wrapper.xml' was moved to
- # 'tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml', so keep looking for the file until we find the new location.
- continue
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return tmp_filename
- return None
-def get_list_of_copied_sample_files( repo, ctx, dir ):
- """
- Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
- each discovered file to dir and return the list of filenames. If a .sample file was added in a changeset and then deleted in a later
- changeset, it will be returned in the deleted_sample_files list. The caller will set the value of app.config.tool_data_path to dir in
- order to load the tools and generate metadata for them.
- """
- deleted_sample_files = []
- sample_files = []
- for changeset in reversed_upper_bounded_changelog( repo, ctx ):
- changeset_ctx = repo.changectx( changeset )
- for ctx_file in changeset_ctx.files():
- ctx_file_name = strip_path( ctx_file )
- # If we decide in the future that files deleted later in the changelog should not be used, we can use the following if statement.
- # if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files and ctx_file_name not in deleted_sample_files:
- if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
- fctx = get_file_context_from_ctx( changeset_ctx, ctx_file )
- if fctx in [ 'DELETED' ]:
- # Since the possibly future used if statement above is commented out, the same file that was initially added will be
- # discovered in an earlier changeset in the change log and fall through to the else block below. In other words, if
- # a file named blast2go.loc.sample was added in change set 0 and then deleted in changeset 3, the deleted file in changeset
- # 3 will be handled here, but the later discovered file in changeset 0 will be handled in the else block below. In this
- # way, the file contents will always be found for future tools even though the file was deleted.
- if ctx_file_name not in deleted_sample_files:
- deleted_sample_files.append( ctx_file_name )
- else:
- sample_files.append( ctx_file_name )
- tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
- fh = open( tmp_ctx_file_name, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return sample_files, deleted_sample_files
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
filename = strip_path( filename )
for ctx_file in ctx.files():
@@ -1092,20 +589,6 @@
previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
reviews=revision_reviews )
return previous_reviews_dict
-def get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ):
- """Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section selection page."""
- name = repo_info_dict.keys()[ 0 ]
- repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = get_repo_info_tuple_contents( repo_info_tuple )
- # Handle README files.
- url = url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = json.from_json_string( raw_text )
- return readme_files_dict
def get_repo_info_tuple_contents( repo_info_tuple ):
# Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
if len( repo_info_tuple ) == 6:
@@ -1235,54 +718,6 @@
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
- """
- Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
- repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
- this method is required to retrieve all repository dependencies.
- """
- all_repo_info_dicts = []
- if repo_info_dicts:
- # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
- required_repository_tups = []
- for repo_info_dict in repo_info_dicts:
- for repository_name, repo_info_tup in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- get_repo_info_tuple_contents( repo_info_tup )
- if repository_dependencies:
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
- components_list = [ toolshed, name, owner, changeset_revision ]
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- for components_list in val:
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- if required_repository_tups:
- # The value of required_repository_tups is a list of tuples, so we need to encode it.
- encoded_required_repository_tups = []
- for required_repository_tup in required_repository_tups:
- encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
- encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
- encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- url = url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- required_repo_info_dict = json.from_json_string( text )
- required_repo_info_dicts = []
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- if required_repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( required_repo_info_dict )
- return all_repo_info_dicts
def get_reversed_changelog_changesets( repo ):
reversed_changelog = []
for changeset in repo.changelog:
@@ -1519,57 +954,6 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
- # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
- message = ''
- sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
- if sample_files:
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
- message = concat_messages( message, message2 )
- return tool, valid, message, sample_files
-def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
- tool = None
- message = ''
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
- # the manifest, but have been deleted from disk.
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
- if sample_files:
- trans.app.config.tool_data_path = work_dir
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- if tool_data_table_config:
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- if error:
- log.debug( message )
- manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
- if manifest_ctx and ctx_file:
- tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
- message = concat_messages( message, message2 )
- return tool, message, sample_files
-def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
- """
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
- if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
- """
- error = False
- message = ''
- try:
- new_table_elems, message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
- tool_data_path=app.config.tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- if message:
- error = True
- except Exception, e:
- message = str( e )
- error = True
- return error, message
def has_previous_repository_reviews( trans, repository, changeset_revision ):
"""Determine if a repository has a changeset revision review prior to the received changeset revision."""
repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
@@ -1579,87 +963,6 @@
if previous_changeset_revision in reviewed_revision_hashes:
return True
return False
-def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
- """
- Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
- is a valid (downloadable) changeset revision. The tool config will be located in the repository manifest between the received valid changeset
- revision and the first changeset revision in the repository, searching backwards.
- """
- original_tool_data_path = trans.app.config.tool_data_path
- repository = get_repository_in_tool_shed( trans, repository_id )
- repo_files_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
- message = ''
- tool = None
- can_use_disk_file = False
- tool_config_filepath = get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
- work_dir = tempfile.mkdtemp()
- can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision )
- if can_use_disk_file:
- trans.app.config.tool_data_path = work_dir
- tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir )
- if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( trans.app,
- repo_files_dir,
- tool_config_filename,
- tool,
- sample_files )
- if invalid_files_and_errors_tups:
- message2 = generate_message_for_invalid_tools( trans,
- invalid_files_and_errors_tups,
- repository,
- metadata_dict=None,
- as_html=True,
- displaying_invalid_tool=True )
- message = concat_messages( message, message2 )
- else:
- tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir )
- remove_dir( work_dir )
- trans.app.config.tool_data_path = original_tool_data_path
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- return repository, tool, message
-def load_tool_from_config( app, full_path ):
- try:
- tool = app.toolbox.load_tool( full_path )
- valid = True
- error_message = None
- except KeyError, e:
- tool = None
- valid = False
- error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
- error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
- error_message += 'this error. '
- except Exception, e:
- tool = None
- valid = False
- error_message = str( e )
- return tool, valid, error_message
-def load_tool_from_tmp_config( trans, repo, ctx, ctx_file, work_dir ):
- tool = None
- message = ''
- tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
- if tmp_tool_config:
- element_tree = util.parse_xml( tmp_tool_config )
- element_tree_root = element_tree.getroot()
- # Look for code files required by the tool config.
- tmp_code_files = []
- for code_elem in element_tree_root.findall( 'code' ):
- code_file_name = code_elem.get( 'file' )
- tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir )
- if tmp_code_file_name:
- tmp_code_files.append( tmp_code_file_name )
- tool, valid, message = load_tool_from_config( trans.app, tmp_tool_config )
- for tmp_code_file in tmp_code_files:
- try:
- os.unlink( tmp_code_file )
- except:
- pass
- try:
- os.unlink( tmp_tool_config )
- except:
- pass
- return tool, message
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
@@ -1714,9 +1017,6 @@
if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
return tool_shed_repository, previous_changeset_revision
return None, None
-def reset_tool_data_tables( app ):
- # Reset the tool_data_tables to an empty dictionary.
- app.tool_data_tables.data_tables = {}
def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
"""
Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
This diff is so big that we needed to truncate the remainder.
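The removed get_required_repo_info_dicts() above joins each ( tool_shed, name, owner, changeset_revision ) tuple with one separator, joins those per-tuple strings with a second separator, and then encodes the whole value for the /repository/get_required_repo_info_dict request. A minimal sketch of that join-then-encode pattern; the separator values and the base64 step are stand-ins, not the actual encoding_util implementation:

    import base64

    ENCODING_SEP = '__sep1__'    # hypothetical stand-in for encoding_util.encoding_sep
    ENCODING_SEP2 = '__sep2__'   # hypothetical stand-in for encoding_util.encoding_sep2

    def encode_required_repository_tups( required_repository_tups ):
        # Join the components of each tuple, then join the per-tuple strings.
        encoded_tups = [ ENCODING_SEP.join( tup ) for tup in required_repository_tups ]
        encoded_str = ENCODING_SEP2.join( encoded_tups )
        # Stand-in for encoding_util.tool_shed_encode(), so the value can travel as a URL parameter.
        return base64.urlsafe_b64encode( encoded_str.encode( 'utf-8' ) )

    # encode_required_repository_tups( [ [ 'http://toolshed.example.org', 'package_x', 'owner_y', 'abc123' ] ] )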
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: guerler: Provide backwards compatibility for sorter
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8703329f3716/
changeset: 8703329f3716
user: guerler
date: 2013-03-15 22:29:29
summary: Provide backwards compatibility for sorter
affected #: 1 file
diff -r b9afd514145740b4d5999158d51bd7dc4a409a99 -r 8703329f3716e7c4603e3a3a57286f67bc3b89af tools/filters/sorter.xml
--- a/tools/filters/sorter.xml
+++ b/tools/filters/sorter.xml
@@ -3,59 +3,131 @@
<command interpreter="python">
sorter.py
--input=$input
- --output=$output
+ --output=$out_file1
+
+ #set $style = '' if (str($style) == 'alpha') else 'n'
+ #set $order = '' if (str($order) == 'ASC') else 'r'
--key=$column,$column$style$order
+
#for $col in $column_set:
- --key=${col.other_column},${col.other_column}${col.other_style}${col.other_order}
+ #set $other_column = str($col.other_column)
+ #set $other_style = '' if (str($col.other_style) == "alpha") else 'n'
+ #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
+ --key=$other_column,$other_column$other_style$other_order
#end for
</command>
<inputs>
<param format="tabular" name="input" type="data" label="Sort Dataset" />
<param name="column" label="on column" type="data_column" data_ref="input" accept_default="true"/>
<param name="style" type="select" label="with flavor">
- <option value="n">Numerical sort</option>
- <option value="">Alphabetical sort</option>
+ <option value="num">Numerical sort</option>
+ <option value="alpha">Alphabetical sort</option></param><param name="order" type="select" label="everything in">
- <option value="r">Descending order</option>
- <option value="">Ascending order</option>
+ <option value="DESC">Descending order</option>
+ <option value="ASC">Ascending order</option></param><repeat name="column_set" title="Column selection"><param name="other_column" label="on column" type="data_column" data_ref="input" accept_default="true" /><param name="other_style" type="select" label="with flavor">
- <option value="n">Numerical sort</option>
- <option value="">Alphabetical sort</option>
+ <option value="num">Numerical sort</option>
+ <option value="alpha">Alphabetical sort</option></param><param name="other_order" type="select" label="everything in">
- <option value="r">Descending order</option>
- <option value="">Ascending order</option>
+ <option value="DESC">Descending order</option>
+ <option value="ASC">Ascending order</option></param></repeat></inputs><outputs>
- <data format="input" name="output" metadata_source="input"/>
+ <data format="input" name="out_file1" metadata_source="input"/></outputs><tests><test><param name="input" value="sort_in1.bed"/><param name="column" value="1"/>
- <param name="style" value=""/>
- <param name="order" value=""/>
+ <param name="style" value="alpha"/>
+ <param name="order" value="ASC"/><param name="other_column" value="3"/>
- <param name="other_style" value="n"/>
- <param name="other_order" value="r"/>
- <output name="output" file="sort_out1.bed"/>
+ <param name="other_style" value="num"/>
+ <param name="other_order" value="DESC"/>
+ <output name="out_file1" file="sort_out1.bed"/></test><test><param name="input" value="sort_in1.bed"/><param name="column" value="1"/>
- <param name="style" value=""/>
- <param name="order" value=""/>
+ <param name="style" value="alpha"/>
+ <param name="order" value="ASC"/><param name="other_column" value="3"/>
- <param name="other_style" value="n"/>
- <param name="other_order" value=""/>
- <output name="output" file="sort_out2.bed"/>
+ <param name="other_style" value="num"/>
+ <param name="other_order" value="ASC"/>
+ <output name="out_file1" file="sort_out2.bed"/></test></tests>
-
+ <help>
+
+ .. class:: infomark
+
+ **TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+ -----
+
+ **Syntax**
+
+ This tool sorts the dataset on any number of columns in either ascending or descending order.
+
+ * Numerical sort orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
+ * Alphabetical sort is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
+
+ -----
+
+ **Examples**
+
+ The list of numbers 4,17,3,5 collates to 3,4,5,17 by numerical sorting, while it collates to 17,3,4,5 by alphabetical sorting.
+
+ Sorting the following::
+
+ Q d 7 II jhu 45
+ A kk 4 I h 111
+ Pd p 1 ktY WS 113
+ A g 10 H ZZ 856
+ A edf 4 tw b 234
+ BBB rt 10 H ZZ 100
+ A rew 10 d b 1111
+ C sd 19 YH aa 10
+ Hah c 23 ver bb 467
+ MN gtr 1 a X 32
+ N j 9 a T 205
+ BBB rrf 10 b Z 134
+ odfr ws 6 Weg dew 201
+ C f 3 WW SW 34
+ A jhg 4 I b 345
+ Pd gf 7 Gthe de 567
+ rS hty 90 YY LOp 89
+ A g 10 H h 43
+ A g 4 I h 500
+
+ on columns 1 (alpha), 3 (num), and 6 (num) in ascending order will yield::
+
+ A kk 4 I h 111
+ A edf 4 tw b 234
+ A jhg 4 I b 345
+ A g 4 I h 500
+ A g 10 H h 43
+ A g 10 H ZZ 856
+ A rew 10 d b 1111
+ BBB rt 10 H ZZ 100
+ BBB rrf 10 b Z 134
+ C f 3 WW SW 34
+ C sd 19 YH aa 10
+ Hah c 23 ver bb 467
+ MN gtr 1 a X 32
+ N j 9 a T 205
+ odfr ws 6 Weg dew 201
+ Pd p 1 ktY WS 113
+ Pd gf 7 Gthe de 567
+ Q d 7 II jhu 45
+ rS hty 90 YY LOp 89
+
+ </help>
</tool>
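The new #set lines map the 'num'/'alpha' and 'ASC'/'DESC' select values back onto the single-character flags that sorter.py expects on its command line, which is what keeps the old behavior intact. A rough Python sketch of that mapping, with an illustrative function name; the flag semantics follow the --key argument built above:

    def build_sort_key( column, style, order ):
        # Translate the new select values into the legacy sorter.py flags.
        style_flag = '' if style == 'alpha' else 'n'   # 'num'  -> numerical sort
        order_flag = '' if order == 'ASC' else 'r'     # 'DESC' -> reverse (descending) order
        return '--key=%s,%s%s%s' % ( column, column, style_flag, order_flag )

    # build_sort_key( 3, 'num', 'DESC' ) returns '--key=3,3nr'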
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for allow_workflow_parameters during recursive call of check_and_update_param_values_helper.
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b9afd5141457/
changeset: b9afd5141457
user: dan
date: 2013-03-15 22:23:21
summary: Fix for allow_workflow_parameters during recursive call of check_and_update_param_values_helper.
affected #: 1 file
diff -r fea372d824394fa70cb37d306827190dc9b47556 -r b9afd514145740b4d5999158d51bd7dc4a409a99 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2349,7 +2349,7 @@
messages[ input.name ] = { input.test_param.name: "No value found for '%s%s', used default" % ( prefix, input.label ) }
test_value = input.test_param.get_initial_value( trans, context )
current_case = input.get_current_case( test_value, trans )
- self.check_and_update_param_values_helper( input.cases[ current_case ].inputs, {}, trans, messages[ input.name ], context, prefix )
+ self.check_and_update_param_values_helper( input.cases[ current_case ].inputs, {}, trans, messages[ input.name ], context, prefix, allow_workflow_parameters=allow_workflow_parameters )
elif isinstance( input, Repeat ):
if input.min:
messages[ input.name ] = []
@@ -2357,7 +2357,7 @@
rep_prefix = prefix + "%s %d > " % ( input.title, i + 1 )
rep_dict = dict()
messages[ input.name ].append( rep_dict )
- self.check_and_update_param_values_helper( input.inputs, {}, trans, rep_dict, context, rep_prefix )
+ self.check_and_update_param_values_helper( input.inputs, {}, trans, rep_dict, context, rep_prefix, allow_workflow_parameters=allow_workflow_parameters )
else:
messages[ input.name ] = "No value found for '%s%s', used default" % ( prefix, input.label )
values[ input.name ] = input.get_initial_value( trans, context )
@@ -2366,7 +2366,7 @@
if isinstance( input, Repeat ):
for i, d in enumerate( values[ input.name ] ):
rep_prefix = prefix + "%s %d > " % ( input.title, i + 1 )
- self.check_and_update_param_values_helper( input.inputs, d, trans, messages, context, rep_prefix )
+ self.check_and_update_param_values_helper( input.inputs, d, trans, messages, context, rep_prefix, allow_workflow_parameters=allow_workflow_parameters )
elif isinstance( input, Conditional ):
group_values = values[ input.name ]
if input.test_param.name not in group_values:
@@ -2378,7 +2378,7 @@
messages[ child_input.name ] = "Value no longer valid for '%s%s', replaced with default" % ( prefix, child_input.label )
else:
current = group_values["__current_case__"]
- self.check_and_update_param_values_helper( input.cases[current].inputs, group_values, trans, messages, context, prefix )
+ self.check_and_update_param_values_helper( input.cases[current].inputs, group_values, trans, messages, context, prefix, allow_workflow_parameters=allow_workflow_parameters )
else:
# Regular tool parameter, no recursion needed
try:
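The change threads allow_workflow_parameters through every recursive call of check_and_update_param_values_helper; before the fix, nested Repeat and Conditional groups always saw the keyword's default value. A stripped-down sketch of that pattern with illustrative names, not the Galaxy API:

    def check_values( inputs, allow_workflow_parameters=False ):
        # Walk a nested parameter dict, keeping the flag consistent at every depth.
        for name, value in inputs.items():
            if isinstance( value, dict ):
                # Omitting the keyword here would reset nested levels to the default (False),
                # which is the kind of drop this commit fixes in the real helper.
                check_values( value, allow_workflow_parameters=allow_workflow_parameters )
            elif not allow_workflow_parameters and str( value ).startswith( '$' ):
                raise ValueError( "workflow parameter not allowed for '%s'" % name )

    # check_values( { 'queries_0': { 'input2': '${my_param}' } }, allow_workflow_parameters=True )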
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for tool_shed/util/metadata_util.py.generate_guid_for_object().
by commits-noreply@bitbucket.org 15 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fea372d82439/
changeset: fea372d82439
user: dan
date: 2013-03-15 21:36:44
summary: Fix for tool_shed/util/metadata_util.py.generate_guid_for_object().
affected #: 1 file
diff -r 1cc6973069b5e7afeb5c2d401c262b0d39efd24b -r fea372d824394fa70cb37d306827190dc9b47556 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -466,7 +466,7 @@
return valid_tool_dependencies_dict
def generate_guid_for_object( repository_clone_url, guid_type, obj_id, version ):
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )
def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url, shed_config_dict=None, relative_install_dir=None,
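The one-line fix calls clean_repository_clone_url() through the suc module reference it is imported under; the GUID format itself stays the cleaned clone URL, object type, object id, and version joined by slashes. A small sketch of that format, with a simplified and assumed stand-in for the URL cleaning:

    def clean_repository_clone_url_sketch( repository_clone_url ):
        # Assumed simplification: drop the protocol and any credentials before '@'.
        url = repository_clone_url.split( '://' )[ -1 ]
        return url.split( '@' )[ -1 ].rstrip( '/' )

    def generate_guid_for_object_sketch( repository_clone_url, guid_type, obj_id, version ):
        tmp_url = clean_repository_clone_url_sketch( repository_clone_url )
        return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )

    # generate_guid_for_object_sketch( 'http://toolshed.example.org/repos/owner_y/package_x', 'tools', 'my_tool', '1.0.0' )
    # -> 'toolshed.example.org/repos/owner_y/package_x/tools/my_tool/1.0.0'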
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.