1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9f55b4b37d35/
Changeset: 9f55b4b37d35
User: jmchilton
Date: 2014-01-11 04:17:39
Summary: PEP-8 fixes for tools/actions/__init__.py
Affected #: 1 file
diff -r 582d1e318dd3b604f2371a861f35c6fae1a2d36d -r 9f55b4b37d35b8366f42ab430ac4213554d83311 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -3,7 +3,8 @@
from galaxy.exceptions import ObjectInvalid
from galaxy.model import LibraryDatasetDatasetAssociation
-from galaxy.tools.parameters import DataToolParameter, SelectToolParameter
+from galaxy.tools.parameters import DataToolParameter
+from galaxy.tools.parameters import SelectToolParameter
from galaxy.tools.parameters.grouping import Conditional, Repeat
from galaxy.util.json import from_json_string
from galaxy.util.json import to_json_string
@@ -15,6 +16,7 @@
import logging
log = logging.getLogger( __name__ )
+
class ToolAction( object ):
"""
The actions to be taken when a tool is run (after parameters have
@@ -23,6 +25,7 @@
def execute( self, tool, trans, incoming={}, set_output_hid=True ):
raise TypeError("Abstract method")
+
class DefaultToolAction( object ):
"""Default tool action is to run an external command"""
@@ -33,8 +36,10 @@
of the DataToolParameter type.
"""
input_datasets = dict()
- def visitor( prefix, input, value, parent = None ):
- def process_dataset( data, formats = None ):
+
+ def visitor( prefix, input, value, parent=None ):
+
+ def process_dataset( data, formats=None ):
if not data:
return data
if formats is None:
@@ -48,11 +53,11 @@
data = converted_dataset
else:
#run converter here
- new_data = data.datatype.convert_dataset( trans, data, target_ext, return_output = True, visible = False ).values()[0]
+ new_data = data.datatype.convert_dataset( trans, data, target_ext, return_output=True, visible=False ).values()[0]
new_data.hid = data.hid
new_data.name = data.name
trans.sa_session.add( new_data )
- assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( parent = data, file_type = target_ext, dataset = new_data, metadata_safe = False )
+ assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( parent=data, file_type=target_ext, dataset=new_data, metadata_safe=False )
trans.sa_session.add( assoc )
trans.sa_session.flush()
data = new_data
@@ -77,17 +82,17 @@
input_datasets[ prefix + conversion_name + str( i + 1 ) ] = new_data
conversions.append( ( conversion_name, new_data ) )
else:
- raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions )
+ raise Exception('A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions ) )
if parent:
parent[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
#allow explicit conversion to be stored in job_parameter table
- parent[ conversion_name ][i] = conversion_data.id #a more robust way to determine JSONable value is desired
+ parent[ conversion_name ][i] = conversion_data.id # a more robust way to determine JSONable value is desired
else:
param_values[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
#allow explicit conversion to be stored in job_parameter table
- param_values[ conversion_name ][i] = conversion_data.id #a more robust way to determine JSONable value is desired
+ param_values[ conversion_name ][i] = conversion_data.id # a more robust way to determine JSONable value is desired
else:
input_datasets[ prefix + input.name ] = process_dataset( value )
conversions = []
@@ -97,14 +102,14 @@
input_datasets[ prefix + conversion_name ] = new_data
conversions.append( ( conversion_name, new_data ) )
else:
- raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name ].extension, conversion_extensions )
+ raise Exception( 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name ].extension, conversion_extensions ) )
target_dict = parent
if not target_dict:
target_dict = param_values
target_dict[ input.name ] = input_datasets[ prefix + input.name ]
for conversion_name, conversion_data in conversions:
#allow explicit conversion to be stored in job_parameter table
- target_dict[ conversion_name ] = conversion_data.id #a more robust way to determine JSONable value is desired
+ target_dict[ conversion_name ] = conversion_data.id # a more robust way to determine JSONable value is desired
tool.visit_inputs( param_values, visitor )
return input_datasets
@@ -130,6 +135,7 @@
else:
copy_from_dict[ key ] = value
return copy_from_dict
+
def make_list_copy( from_list ):
new_list = []
for value in from_list:
@@ -140,32 +146,33 @@
else:
new_list.append( value )
return new_list
- def wrap_values( inputs, input_values, skip_missing_values = False ):
+
+ def wrap_values( inputs, input_values, skip_missing_values=False ):
# Wrap tool inputs as necessary
for input in inputs.itervalues():
if input.name not in input_values and skip_missing_values:
continue
if isinstance( input, Repeat ):
for d in input_values[ input.name ]:
- wrap_values( input.inputs, d, skip_missing_values = skip_missing_values )
+ wrap_values( input.inputs, d, skip_missing_values=skip_missing_values )
elif isinstance( input, Conditional ):
values = input_values[ input.name ]
current = values[ "__current_case__" ]
- wrap_values( input.cases[current].inputs, values, skip_missing_values = skip_missing_values )
+ wrap_values( input.cases[current].inputs, values, skip_missing_values=skip_missing_values )
elif isinstance( input, DataToolParameter ) and input.multiple:
input_values[ input.name ] = \
galaxy.tools.DatasetListWrapper( input_values[ input.name ],
- datatypes_registry = trans.app.datatypes_registry,
- tool = tool,
- name = input.name )
+ datatypes_registry=trans.app.datatypes_registry,
+ tool=tool,
+ name=input.name )
elif isinstance( input, DataToolParameter ):
input_values[ input.name ] = \
galaxy.tools.DatasetFilenameWrapper( input_values[ input.name ],
- datatypes_registry = trans.app.datatypes_registry,
- tool = tool,
- name = input.name )
+ datatypes_registry=trans.app.datatypes_registry,
+ tool=tool,
+ name=input.name )
elif isinstance( input, SelectToolParameter ):
- input_values[ input.name ] = galaxy.tools.SelectToolParameterWrapper( input, input_values[ input.name ], tool.app, other_values = incoming )
+ input_values[ input.name ] = galaxy.tools.SelectToolParameterWrapper( input, input_values[ input.name ], tool.app, other_values=incoming )
else:
input_values[ input.name ] = galaxy.tools.InputValueWrapper( input, input_values[ input.name ], incoming )
@@ -183,7 +190,7 @@
input_dbkey = incoming.get( "dbkey", "?" )
for name, data in inp_data.items():
if not data:
- data = NoneDataset( datatypes_registry = trans.app.datatypes_registry )
+ data = NoneDataset( datatypes_registry=trans.app.datatypes_registry )
continue
# Convert LDDA to an HDA.
@@ -191,7 +198,7 @@
data = data.to_history_dataset_association( None )
inp_data[name] = data
- else: # HDA
+ else: # HDA
if data.hid:
input_names.append( 'data %s' % data.hid )
input_ext = data.ext
@@ -212,9 +219,9 @@
if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
# Custom build.
custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ input_dbkey ]
- # HACK: the attempt to get chrom_info below will trigger the
+ # HACK: the attempt to get chrom_info below will trigger the
# fasta-to-len converter if the dataset is not available or,
- # which will in turn create a recursive loop when
+ # which will in turn create a recursive loop when
# running the fasta-to-len tool. So, use a hack in the second
# condition below to avoid getting chrom_info when running the
# fasta-to-len converter.
@@ -248,7 +255,7 @@
on_text = ""
# Add the dbkey to the incoming parameters
incoming[ "dbkey" ] = input_dbkey
- params = None #wrapped params are used by change_format action and by output.label; only perform this wrapping once, as needed
+ params = None # wrapped params are used by change_format action and by output.label; only perform this wrapping once, as needed
# Keep track of parent / child relationships, we'll create all the
# datasets first, then create the associations
parent_to_child_pairs = []
@@ -258,10 +265,10 @@
for filter in output.filters:
try:
if not eval( filter.text.strip(), globals(), incoming ):
- break #do not create this dataset
+ break # do not create this dataset
except Exception, e:
log.debug( 'Dataset output filter failed: %s' % e )
- else: #all filters passed
+ else: # all filters passed
if output.parent:
parent_to_child_pairs.append( ( output.parent, name ) )
child_dataset_names.add( name )
@@ -291,7 +298,7 @@
if output.change_format:
if params is None:
params = make_dict_copy( incoming )
- wrap_values( tool.inputs, params, skip_missing_values = not tool.check_values )
+ wrap_values( tool.inputs, params, skip_missing_values=not tool.check_values )
for change_elem in output.change_format:
for when_elem in change_elem.findall( 'when' ):
check = when_elem.get( 'input', None )
@@ -300,9 +307,9 @@
if '$' not in check:
#allow a simple name or more complex specifications
check = '${%s}' % check
- if str( fill_template( check, context = params ) ) == when_elem.get( 'value', None ):
+ if str( fill_template( check, context=params ) ) == when_elem.get( 'value', None ):
ext = when_elem.get( 'format', ext )
- except: #bad tag input value; possibly referencing a param within a different conditional when block or other nonexistent grouping construct
+ except: # bad tag input value; possibly referencing a param within a different conditional when block or other nonexistent grouping construct
continue
else:
check = when_elem.get( 'input_dataset', None )
@@ -348,7 +355,7 @@
# <outputs>
# <data format="input" name="output" label="Blat on ${<input_param>.name}" />
# </outputs>
- wrap_values( tool.inputs, params, skip_missing_values = not tool.check_values )
+ wrap_values( tool.inputs, params, skip_missing_values=not tool.check_values )
#tool (only needing to be set once) and on_string (set differently for each label) are overwritten for each output dataset label being determined
params['tool'] = tool
params['on_string'] = on_text
@@ -356,7 +363,7 @@
else:
if params is None:
params = make_dict_copy( incoming )
- wrap_values( tool.inputs, params, skip_missing_values = not tool.check_values )
+ wrap_values( tool.inputs, params, skip_missing_values=not tool.check_values )
data.name = self._get_default_data_name( data, tool, on_text=on_text, trans=trans, incoming=incoming, history=history, params=params, job_params=job_params )
# Store output
out_data[ name ] = data
@@ -369,10 +376,10 @@
trans.sa_session.flush()
# Add all the top-level (non-child) datasets to the history unless otherwise specified
for name in out_data.keys():
- if name not in child_dataset_names and name not in incoming: #don't add children; or already existing datasets, i.e. async created
+ if name not in child_dataset_names and name not in incoming: # don't add children; or already existing datasets, i.e. async created
data = out_data[ name ]
if set_output_history:
- history.add_dataset( data, set_hid = set_output_hid )
+ history.add_dataset( data, set_hid=set_output_hid )
trans.sa_session.add( data )
trans.sa_session.flush()
# Add all the children to their parents
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have commit notifications enabled for this
repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/582d1e318dd3/
Changeset: 582d1e318dd3
User: greg
Date: 2014-01-10 22:36:45
Summary: Fix message rendering for repositories with multiple heads.
Affected #: 2 files
diff -r 56b61e7d5bbcea1dff823cf78229289480ee15e7 -r 582d1e318dd3b604f2371a861f35c6fae1a2d36d templates/webapps/tool_shed/repository/manage_repository.mako
--- a/templates/webapps/tool_shed/repository/manage_repository.mako
+++ b/templates/webapps/tool_shed/repository/manage_repository.mako
@@ -125,7 +125,7 @@
from tool_shed.util.shed_util_common import get_revision_label_from_ctx
heads_str = ''
for ctx in heads:
- heads_str += get_revision_label_from_ctx( ctx )
+ heads_str += '%s<br/>' % get_revision_label_from_ctx( ctx )
%>
Contact the administrator of this Tool Shed as soon as possible and let them know that
this repository has the following multiple heads which must be merged.<br/>
diff -r 56b61e7d5bbcea1dff823cf78229289480ee15e7 -r 582d1e318dd3b604f2371a861f35c6fae1a2d36d templates/webapps/tool_shed/repository/view_repository.mako
--- a/templates/webapps/tool_shed/repository/view_repository.mako
+++ b/templates/webapps/tool_shed/repository/view_repository.mako
@@ -72,7 +72,7 @@
from tool_shed.util.shed_util_common import get_revision_label_from_ctx
heads_str = ''
for ctx in heads:
- heads_str += get_revision_label_from_ctx( ctx )
+ heads_str += '%s<br/>' % get_revision_label_from_ctx( ctx )
%>
Contact the administrator of this Tool Shed as soon as possible and let them know that
this repository has the following multiple heads which must be merged.<br/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have commit notifications enabled for this
repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ad9661041105/
Changeset: ad9661041105
User: greg
Date: 2014-01-10 21:05:57
Summary: Fix tool shed functional test I just broke.
Affected #: 1 file
diff -r 5f6c670dca9f42d52480f66afeb91b63710e7ffb -r ad9661041105bbffe4772ee9bedbcf174c53eac3 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -349,6 +349,6 @@
break
# Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured
# unicode decoding error message.
- strings_displayed = [ 'Changeset %d:%s' % ( revision_number, revision_hash ), 'filtering_0000', 'user1', 'repos', 'added:',
+ strings_displayed = [ '%d:%s' % ( revision_number, revision_hash ), 'filtering_0000', 'user1', 'repos', 'added:',
'+These characters should not' ]
- self.load_changeset_in_tool_shed( repository_id, changeset_revision, strings_displayed=strings_displayed )
\ No newline at end of file
+ self.load_changeset_in_tool_shed( repository_id, changeset_revision, strings_displayed=strings_displayed )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have commit notifications enabled for this
repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c8d2e455f0e8/
Changeset: c8d2e455f0e8
User: dannon
Date: 2014-01-10 20:22:03
Summary: Temporarily add simplejson back until webhelpers is updated
Affected #: 1 file
diff -r d7b4edc2d749d9835f61dccaab0e17614a9cd336 -r c8d2e455f0e865662028d831d20a83316f2ab33f eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -26,6 +26,7 @@
pysam = 0.4.2
pysqlite = 2.5.6
python_lzo = 1.08_2.03_static
+simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
SQLAlchemy = 0.7.9
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have commit notifications enabled for this
repository.