1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/418993fac75d/
Changeset: 418993fac75d
User: jmchilton
Date: 2013-11-14 19:35:27
Summary: get_initial_value history fix. Allows ToolDataParameters inside of repeats when using API.
Affected #: 1 file
diff -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 -r 418993fac75d07297dc317e183cf401fd9fbb869 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -113,7 +113,7 @@
for i in range( self.default ):
rval_dict = { '__index__': i}
for input in self.inputs.itervalues():
- rval_dict[ input.name ] = input.get_initial_value( trans, context )
+ rval_dict[ input.name ] = input.get_initial_value( trans, context, history=history )
rval.append( rval_dict )
return rval
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
16 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2edddde7517d/
Changeset: 2edddde7517d
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Some PEP-8 fixes for lib/galaxy/tools/parameters/basic.py.
Affected #: 1 file
diff -r 1df960b4892ae935840455de9ac058e396ec9410 -r 2edddde7517d7957fe75a87fa7d45b80dedc156f lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2,14 +2,20 @@
Basic tool parameters.
"""
-import logging, string, sys, os, os.path, urllib
+import logging
+import string
+import sys
+import os
+import os.path
+import urllib
from elementtree.ElementTree import XML, Element
from galaxy import config, datatypes, util
from galaxy.web import form_builder
from galaxy.util.bunch import Bunch
from galaxy.util import string_as_bool, sanitize_param, unicodify
from sanitize import ToolParameterSanitizer
-import validation, dynamic_options
+import validation
+import dynamic_options
# For BaseURLToolParameter
from galaxy.web import url_for
from galaxy.model.item_attrs import Dictifiable
@@ -53,8 +59,10 @@
def get_label( self ):
"""Return user friendly name for the parameter"""
- if self.label: return self.label
- else: return self.name
+ if self.label:
+ return self.label
+ else:
+ return self.name
def get_html_field( self, trans=None, value=None, other_values={} ):
raise TypeError( "Abstract Method" )
@@ -87,7 +95,7 @@
if a value has already been chosen from the history. This is to support the capability to
choose each dataset once
"""
- return self.get_initial_value(trans, context, history=history);
+ return self.get_initial_value(trans, context, history=history)
def get_required_enctype( self ):
"""
@@ -166,7 +174,7 @@
return value
def validate( self, value, history=None ):
- if value=="" and self.optional:
+ if value == "" and self.optional:
return
for validator in self.validators:
validator.validate( value, history )
@@ -219,7 +227,8 @@
self.area = string_as_bool( elem.get( 'area', False ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- if value is None: value = self.value
+ if value is None:
+ value = self.value
if self.area:
return form_builder.TextArea( self.name, self.size, value )
else:
@@ -228,6 +237,7 @@
def get_initial_value( self, trans, context, history=None ):
return self.value
+
class IntegerToolParameter( TextToolParameter ):
"""
Parameter that takes an integer value.
@@ -412,7 +422,7 @@
checked = self.checked
if value is not None:
checked = form_builder.CheckboxField.is_checked( value )
- return form_builder.CheckboxField( self.name, checked, refresh_on_change = self.refresh_on_change )
+ return form_builder.CheckboxField( self.name, checked, refresh_on_change=self.refresh_on_change )
def from_html( self, value, trans=None, other_values={} ):
return form_builder.CheckboxField.is_checked( value )
@@ -461,7 +471,7 @@
self.ajax = string_as_bool( elem.get( 'ajax-upload' ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- return form_builder.FileField( self.name, ajax = self.ajax, value = value )
+ return form_builder.FileField( self.name, ajax=self.ajax, value=value )
def from_html( self, value, trans=None, other_values={} ):
# Middleware or proxies may encode files in special ways (TODO: this
@@ -476,8 +486,8 @@
assert local_filename.startswith( upload_store ), \
"Filename provided by nginx is not in correct directory"
value = dict(
- filename = value["name"],
- local_filename = local_filename
+ filename=value["name"],
+ local_filename=local_filename
)
return value
@@ -533,7 +543,7 @@
user_ftp_dir = None
else:
user_ftp_dir = trans.user_ftp_dir
- return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value = value )
+ return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value=value )
def from_html( self, value, trans=None, other_values={} ):
try:
@@ -754,8 +764,9 @@
else:
return form_builder.TextField( self.name, value=(value or "") )
if value is not None:
- if not isinstance( value, list ): value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
+ if not isinstance( value, list ):
+ value = [ value ]
+ field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
options = self.get_options( trans, context )
for text, optval, selected in options:
if isinstance( optval, UnvalidatedValue ):
@@ -793,7 +804,7 @@
rval.append( v )
return rval
else:
- value_is_none = ( value == "None" and "None" not in legal_values )
+ value_is_none = ( value == "None" and "None" not in legal_values )
if value_is_none:
if self.multiple:
if self.optional:
@@ -943,7 +954,7 @@
options = []
try:
options = self.get_options( trans, {} )
- except AssertionError, assertion:
+ except AssertionError:
# we dont/cant set other_values (the {} above), so params that require other params to be filled will error:
# required dependency in filter_options
# associated DataToolParam in get_column_list
@@ -1531,8 +1542,9 @@
TODO: There should be an alternate display that allows single selects to be
displayed as radio buttons and multiple selects as a set of checkboxes
- TODO: The following must be fixed to test correctly for the new security_check tag in the DataToolParameter ( the last test below is broken )
- Nate's next pass at the dataset security stuff will dramatically alter this anyway.
+ TODO: The following must be fixed to test correctly for the new security_check tag in
+ the DataToolParameter ( the last test below is broken ) Nate's next pass at the dataset
+ security stuff will dramatically alter this anyway.
"""
def __init__( self, tool, elem, trans=None):
@@ -1579,8 +1591,8 @@
# Load conversions required for the dataset input
self.conversions = []
for conv_elem in elem.findall( "conversion" ):
- name = conv_elem.get( "name" ) #name for commandline substitution
- conv_extensions = conv_elem.get( "type" ) #target datatype extension
+ name = conv_elem.get( "name" ) # name for commandline substitution
+ conv_extensions = conv_elem.get( "type" ) # target datatype extension
# FIXME: conv_extensions should be able to be an ordered list
assert None not in [ name, type ], 'A name (%s) and type (%s) are required for explicit conversion' % ( name, type )
conv_types = tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() )
@@ -1592,14 +1604,15 @@
try:
filter_value = self.options.get_options( trans, other_values )[0][0]
except IndexError:
- pass #no valid options
+ pass # no valid options
assert trans is not None, "DataToolParameter requires a trans"
history = trans.get_history()
assert history is not None, "DataToolParameter requires a history"
if value is not None:
if type( value ) != list:
value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
+ field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
+
# CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field()
def dataset_collector( hdas, parent_hid ):
current_user_roles = trans.get_current_user_roles()
@@ -1654,7 +1667,7 @@
return field
def get_initial_value( self, trans, context, history=None ):
- return self.get_initial_value_from_history_prevent_repeats(trans, context, None, history=history);
+ return self.get_initial_value_from_history_prevent_repeats(trans, context, None, history=history)
def get_initial_value_from_history_prevent_repeats( self, trans, context, already_used, history=None ):
"""
@@ -1676,7 +1689,8 @@
try:
filter_value = self.options.get_options( trans, context )[0][0]
except IndexError:
- pass #no valid options
+ pass # no valid options
+
def dataset_collector( datasets ):
def is_convertable( dataset ):
target_ext, converted_dataset = dataset.find_conversion_destination( self.formats )
https://bitbucket.org/galaxy/galaxy-central/commits/4f68a886036c/
Changeset: 4f68a886036c
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: PEP-8 cleanup of lib/galaxy/tools/actions/upload.py.
Affected #: 1 file
diff -r 2edddde7517d7957fe75a87fa7d45b80dedc156f -r 4f68a886036c934194d4a625cb2fcccf32e64aba lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -4,8 +4,10 @@
import logging
log = logging.getLogger( __name__ )
+
class UploadToolAction( ToolAction ):
- def execute( self, tool, trans, incoming={}, set_output_hid = True, history=None, **kwargs ):
+
+ def execute( self, tool, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
if input.type == "upload_dataset":
https://bitbucket.org/galaxy/galaxy-central/commits/510cbdd44e77/
Changeset: 510cbdd44e77
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: PEP-8 cleanups for lib/galaxy/tools/actions/upload_common.py.
Affected #: 1 file
diff -r 4f68a886036c934194d4a625cb2fcccf32e64aba -r 510cbdd44e772629b49d1ae134af199a006ee28d lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -1,4 +1,8 @@
-import os, tempfile, StringIO, pwd, subprocess
+import os
+import tempfile
+import StringIO
+import pwd
+import subprocess
from cgi import FieldStorage
from galaxy import datatypes, util
from galaxy.util.odict import odict
@@ -10,13 +14,13 @@
import logging
log = logging.getLogger( __name__ )
+
def persist_uploads( params ):
"""
Turn any uploads in the submitted form to persisted files.
"""
if 'files' in params:
new_files = []
- temp_files = []
for upload_dataset in params['files']:
f = upload_dataset['file_data']
if isinstance( f, FieldStorage ):
@@ -24,8 +28,8 @@
assert f.file.name != '<fdopen>'
local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
f.file.close()
- upload_dataset['file_data'] = dict( filename = f.filename,
- local_filename = local_filename )
+ upload_dataset['file_data'] = dict( filename=f.filename,
+ local_filename=local_filename )
elif type( f ) == dict and 'filename' and 'local_filename' not in f:
raise Exception( 'Uploaded file was encoded in a way not understood by Galaxy.' )
if upload_dataset['url_paste'] and upload_dataset['url_paste'].strip() != '':
@@ -35,6 +39,8 @@
new_files.append( upload_dataset )
params['files'] = new_files
return params
+
+
def handle_library_params( trans, params, folder_id, replace_dataset=None ):
# FIXME: the received params has already been parsed by util.Params() by the time it reaches here,
# so no complex objects remain. This is not good because it does not allow for those objects to be
@@ -62,6 +68,8 @@
role = trans.sa_session.query( trans.app.model.Role ).get( role_id )
library_bunch.roles.append( role )
return library_bunch
+
+
def get_precreated_datasets( trans, params, data_obj, controller='root' ):
"""
Get any precreated datasets (when using asynchronous uploads).
@@ -90,6 +98,8 @@
else:
rval.append( data )
return rval
+
+
def get_precreated_dataset( precreated_datasets, name ):
"""
Return a dataset matching a name from the list of precreated (via async
@@ -101,21 +111,24 @@
return precreated_datasets.pop( names.index( name ) )
else:
return None
+
+
def cleanup_unused_precreated_datasets( precreated_datasets ):
for data in precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
+
def __new_history_upload( trans, uploaded_dataset, history=None, state=None ):
if not history:
history = trans.history
- hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
- extension = uploaded_dataset.file_type,
- dbkey = uploaded_dataset.dbkey,
- history = history,
- create_dataset = True,
- sa_session = trans.sa_session )
+ hda = trans.app.model.HistoryDatasetAssociation( name=uploaded_dataset.name,
+ extension=uploaded_dataset.file_type,
+ dbkey=uploaded_dataset.dbkey,
+ history=history,
+ create_dataset=True,
+ sa_session=trans.sa_session )
if state:
hda.state = state
else:
@@ -128,6 +141,7 @@
trans.sa_session.flush()
return hda
+
def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
current_user_roles = trans.get_current_user_roles()
if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
@@ -156,13 +170,13 @@
trans.sa_session.add( ld )
trans.sa_session.flush()
trans.app.security_agent.copy_library_permissions( trans, folder, ld )
- ldda = trans.app.model.LibraryDatasetDatasetAssociation( name = uploaded_dataset.name,
- extension = uploaded_dataset.file_type,
- dbkey = uploaded_dataset.dbkey,
- library_dataset = ld,
- user = trans.user,
- create_dataset = True,
- sa_session = trans.sa_session )
+ ldda = trans.app.model.LibraryDatasetDatasetAssociation( name=uploaded_dataset.name,
+ extension=uploaded_dataset.file_type,
+ dbkey=uploaded_dataset.dbkey,
+ library_dataset=ld,
+ user=trans.user,
+ create_dataset=True,
+ sa_session=trans.sa_session )
trans.sa_session.add( ldda )
if state:
ldda.state = state
@@ -210,12 +224,14 @@
trans.sa_session.flush()
return ldda
+
def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ):
if library_bunch:
return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
else:
return __new_history_upload( trans, uploaded_dataset, history=history, state=state )
+
def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ):
uploaded_datasets = []
for dataset_upload_input in dataset_upload_inputs:
@@ -256,6 +272,8 @@
history.genome_build = uploaded_dataset.dbkey
uploaded_dataset.data = data
return uploaded_datasets
+
+
def create_paramfile( trans, uploaded_datasets ):
"""
Create the upload tool's JSON "param" file.
@@ -284,14 +302,14 @@
setattr( data.metadata, meta_name, meta_value )
trans.sa_session.add( data )
trans.sa_session.flush()
- json = dict( file_type = uploaded_dataset.file_type,
- dataset_id = data.dataset.id,
- dbkey = uploaded_dataset.dbkey,
- type = uploaded_dataset.type,
- metadata = uploaded_dataset.metadata,
- primary_file = uploaded_dataset.primary_file,
- composite_file_paths = uploaded_dataset.composite_files,
- composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ json = dict( file_type=uploaded_dataset.file_type,
+ dataset_id=data.dataset.id,
+ dbkey=uploaded_dataset.dbkey,
+ type=uploaded_dataset.type,
+ metadata=uploaded_dataset.metadata,
+ primary_file=uploaded_dataset.primary_file,
+ composite_file_paths=uploaded_dataset.composite_files,
+ composite_files=dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
else:
try:
is_binary = uploaded_dataset.datatype.is_binary
@@ -305,18 +323,18 @@
uuid_str = uploaded_dataset.uuid
except:
uuid_str = None
- json = dict( file_type = uploaded_dataset.file_type,
- ext = uploaded_dataset.ext,
- name = uploaded_dataset.name,
- dataset_id = data.dataset.id,
- dbkey = uploaded_dataset.dbkey,
- type = uploaded_dataset.type,
- is_binary = is_binary,
- link_data_only = link_data_only,
- uuid = uuid_str,
- space_to_tab = uploaded_dataset.space_to_tab,
- in_place = trans.app.config.external_chown_script is None,
- path = uploaded_dataset.path )
+ json = dict( file_type=uploaded_dataset.file_type,
+ ext=uploaded_dataset.ext,
+ name=uploaded_dataset.name,
+ dataset_id=data.dataset.id,
+ dbkey=uploaded_dataset.dbkey,
+ type=uploaded_dataset.type,
+ is_binary=is_binary,
+ link_data_only=link_data_only,
+ uuid=uuid_str,
+ space_to_tab=uploaded_dataset.space_to_tab,
+ in_place=trans.app.config.external_chown_script is None,
+ path=uploaded_dataset.path )
# TODO: This will have to change when we start bundling inputs.
# Also, in_place above causes the file to be left behind since the
# user cannot remove it unless the parent directory is writable.
@@ -327,6 +345,8 @@
if trans.app.config.external_chown_script:
_chown( json_file_path )
return json_file_path
+
+
def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None ):
"""
Create the upload job.
@@ -383,6 +403,8 @@
for i, v in enumerate( data_list ):
output[ 'output%i' % i ] = v
return job, output
+
+
def active_folders( trans, folder ):
# Stolen from galaxy.web.controllers.library_common (importing from which causes a circular issues).
# Much faster way of retrieving all active sub-folders within a given folder than the
https://bitbucket.org/galaxy/galaxy-central/commits/83970bc291d2/
Changeset: 83970bc291d2
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up row_for_param in tool_form.mako into smaller pieces.
Affected #: 2 files
diff -r 510cbdd44e772629b49d1ae134af199a006ee28d -r 83970bc291d2866ccc8782111a64a01e48bea9d1 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1359,7 +1359,7 @@
options = []
for filter_key, filter_value in self.filtered.iteritems():
dataset = other_values[filter_key]
- if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ): #this is a bad way to check for this, but problems importing class ( due to circular imports? )
+ if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ): # this is a bad way to check for this, but problems importing class ( due to circular imports? )
dataset = dataset.dataset
if dataset:
for meta_key, meta_dict in filter_value.iteritems():
diff -r 510cbdd44e772629b49d1ae134af199a006ee28d -r 83970bc291d2866ccc8782111a64a01e48bea9d1 templates/webapps/galaxy/tool_form.mako
--- a/templates/webapps/galaxy/tool_form.mako
+++ b/templates/webapps/galaxy/tool_form.mako
@@ -218,12 +218,11 @@
else:
cls = "form-row"
- label = param.get_label()
-
field = param.get_html_field( trans, parent_state[ param.name ], other_values )
field.refresh_on_change = param.refresh_on_change
- # Field may contain characters submitted by user and these characters may be unicode; handle non-ascii characters gracefully.
+ # Field may contain characters submitted by user and these characters may
+ # be unicode; handle non-ascii characters gracefully.
field_html = field.get_html( prefix )
if type( field_html ) is not unicode:
field_html = unicode( field_html, 'utf-8', 'replace' )
@@ -232,25 +231,39 @@
return field_html
%><div class="${cls}">
- %if label:
- <label for="${param.name}">${label}:</label>
- %endif
- <div class="form-row-input">${field_html}</div>
- %if parent_errors.has_key( param.name ):
- <div class="form-row-error-message">
- <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
- </div>
- %endif
+ ${label_for_param( param )}
+ ${input_for_param( param, field_html )}
+ ${errors_for_param( param, parent_errors )}
+ ${help_for_param( param )}
+ <div style="clear: both;"></div>
+ </div>
+</%def>
- %if param.help:
- <div class="toolParamHelp" style="clear: both;">
- ${param.help}
- </div>
- %endif
+<%def name="input_for_param( param, field_html )">
+ <div class="form-row-input">${field_html}</div>
+</%def>
- <div style="clear: both;"></div>
+<%def name="label_for_param( param )">
+ <% label = param.get_label()%>
+ %if label:
+ <label for="${param.name}">${label}:</label>
+ %endif
+</%def>
- </div>
+<%def name="errors_for_param( param, parent_errors )">
+ %if parent_errors.has_key( param.name ):
+ <div class="form-row-error-message">
+ <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
+ </div>
+ %endif
+</%def>
+
+<%def name="help_for_param( param )">
+ %if param.help:
+ <div class="toolParamHelp" style="clear: both;">
+ ${param.help}
+ </div>
+ %endif
</%def><%def name="row_for_rerun()">
https://bitbucket.org/galaxy/galaxy-central/commits/fcb39c3fdd3e/
Changeset: fcb39c3fdd3e
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up tools.handle_input - split out param checking.
Affected #: 1 file
diff -r 83970bc291d2866ccc8782111a64a01e48bea9d1 -r fcb39c3fdd3ece51e9d4753a91880a634e6419e2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1926,24 +1926,8 @@
if len(incoming):
self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
- # Process incoming data
- if not( self.check_values ):
- # If `self.check_values` is false we don't do any checking or
- # processing on input This is used to pass raw values
- # through to/from external sites. FIXME: This should be handled
- # more cleanly, there is no reason why external sites need to
- # post back to the same URL that the tool interface uses.
- errors = {}
- params = incoming
- else:
- # Update state for all inputs on the current page taking new
- # values from `incoming`.
- errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
- # If the tool provides a `validate_input` hook, call it.
- validate_input = self.get_hook( 'validate_input' )
- if validate_input:
- validate_input( trans, errors, state.inputs, self.inputs_by_page[state.page] )
- params = state.inputs
+
+ errors, params = self.__check_param_values( trans, incoming, state, old_errors )
# Did the user actually click next / execute or is this just
# a refresh?
if 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming:
@@ -1996,6 +1980,28 @@
if not self.display_interface:
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
+
+ def __check_param_values( self, trans, incoming, state, old_errors ):
+ # Process incoming data
+ if not( self.check_values ):
+ # If `self.check_values` is false we don't do any checking or
+ # processing on input This is used to pass raw values
+ # through to/from external sites. FIXME: This should be handled
+ # more cleanly, there is no reason why external sites need to
+ # post back to the same URL that the tool interface uses.
+ errors = {}
+ params = incoming
+ else:
+ # Update state for all inputs on the current page taking new
+ # values from `incoming`.
+ errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ # If the tool provides a `validate_input` hook, call it.
+ validate_input = self.get_hook( 'validate_input' )
+ if validate_input:
+ validate_input( trans, errors, state.inputs, self.inputs_by_page[state.page] )
+ params = state.inputs
+ return errors, params
+
def find_fieldstorage( self, x ):
if isinstance( x, FieldStorage ):
raise InterruptedUpload( None )
https://bitbucket.org/galaxy/galaxy-central/commits/0c0692e52ff7/
Changeset: 0c0692e52ff7
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up tools.handle_input - split out state creation.
Affected #: 1 file
diff -r fcb39c3fdd3ece51e9d4753a91880a634e6419e2 -r 0c0692e52ff7ed81b7784ecaea7dff7dcdd28fa5 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1908,13 +1908,8 @@
to the form or execute the tool (only if 'execute' was clicked and
there were no errors).
"""
- # Get the state or create if not found
- if "tool_state" in incoming:
- encoded_state = string_to_object( incoming["tool_state"] )
- state = DefaultToolState()
- state.decode( encoded_state, self, trans.app )
- else:
- state = self.new_state( trans, history=history )
+ state, state_new = self.__fetch_state( trans, incoming, history )
+ if state_new:
# This feels a bit like a hack. It allows forcing full processing
# of inputs even when there is no state in the incoming dictionary
# by providing either 'runtool_btn' (the name of the submit button
@@ -1928,6 +1923,7 @@
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
errors, params = self.__check_param_values( trans, incoming, state, old_errors )
+
# Did the user actually click next / execute or is this just
# a refresh?
if 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming:
@@ -1981,6 +1977,18 @@
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def __fetch_state( self, trans, incoming, history ):
+ # Get the state or create if not found
+ if "tool_state" in incoming:
+ encoded_state = string_to_object( incoming["tool_state"] )
+ state = DefaultToolState()
+ state.decode( encoded_state, self, trans.app )
+ new = False
+ else:
+ state = self.new_state( trans, history=history )
+ new = True
+ return state, new
+
def __check_param_values( self, trans, incoming, state, old_errors ):
# Process incoming data
if not( self.check_values ):
https://bitbucket.org/galaxy/galaxy-central/commits/c4632090a444/
Changeset: c4632090a444
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up tools.handle_input - split out refresh state logic.
Affected #: 1 file
diff -r 0c0692e52ff7ed81b7784ecaea7dff7dcdd28fa5 -r c4632090a444751660a354e997873da5fd14cfe1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1924,9 +1924,11 @@
errors, params = self.__check_param_values( trans, incoming, state, old_errors )
- # Did the user actually click next / execute or is this just
- # a refresh?
- if 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming:
+ if self.__should_refresh_state( incoming ):
+ return self.__handle_state_refresh( trans, state, errors )
+ else:
+ # User actually clicked next or execute.
+
# If there were errors, we stay on the same page and display
# error messages
if errors:
@@ -1962,7 +1964,11 @@
if not self.display_interface:
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
- else:
+
+ def __should_refresh_state( self, incoming ):
+ return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
+
+ def __handle_state_refresh( self, trans, state, errors ):
try:
self.find_fieldstorage( state.inputs )
except InterruptedUpload:
@@ -1974,7 +1980,7 @@
pass
# Just a refresh, render the form with updated state and errors.
if not self.display_interface:
- return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
def __fetch_state( self, trans, incoming, history ):
https://bitbucket.org/galaxy/galaxy-central/commits/2cf8be3f467b/
Changeset: 2cf8be3f467b
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up tools.handle_input - split out page advance logic.
Affected #: 1 file
diff -r c4632090a444751660a354e997873da5fd14cfe1 -r 2cf8be3f467ba645d175782ae4c85d942c0cd88f lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1958,12 +1958,7 @@
return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
# Otherwise move on to the next page
else:
- state.page += 1
- # Fill in the default values for the next page
- self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
- if not self.display_interface:
- return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ return self.__handle_page_advance( trans, state, errors )
def __should_refresh_state( self, incoming ):
return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
@@ -1983,6 +1978,14 @@
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def __handle_page_advance( self, trans, state, errors ):
+ state.page += 1
+ # Fill in the default values for the next page
+ self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+
def __fetch_state( self, trans, incoming, history ):
# Get the state or create if not found
if "tool_state" in incoming:
https://bitbucket.org/galaxy/galaxy-central/commits/5541fd3245c5/
Changeset: 5541fd3245c5
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Break up tools.handle_input - split out actual tool execution initiation.
Affected #: 1 file
diff -r 2cf8be3f467ba645d175782ae4c85d942c0cd88f -r 5541fd3245c571c1cbe7a51faa734cd6aa052d4e lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1936,26 +1936,7 @@
return "tool_form.mako", dict( errors=errors, tool_state=state, incoming=incoming, error_message=error_message )
# If we've completed the last page we can execute the tool
elif state.page == self.last_page:
- try:
- rerun_remap_job_id = None
- if 'rerun_remap_job_id' in incoming:
- rerun_remap_job_id = trans.app.security.decode_id(incoming['rerun_remap_job_id'])
- _, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
- except httpexceptions.HTTPFound, e:
- #if it's a paste redirect exception, pass it up the stack
- raise e
- except Exception, e:
- log.exception('Exception caught while attempting tool execution:')
- return 'message.mako', dict( status='error', message='Error executing tool: %s' % str(e), refresh_frames=[] )
- try:
- assert isinstance( out_data, odict )
- return 'tool_executed.mako', dict( out_data=out_data )
- except:
- if isinstance( out_data, str ):
- message = out_data
- else:
- message = 'Failure executing tool (odict not returned from tool execution)'
- return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
+ return self.__handle_tool_execute( trans, incoming, params, history )
# Otherwise move on to the next page
else:
return self.__handle_page_advance( trans, state, errors )
@@ -1963,6 +1944,28 @@
def __should_refresh_state( self, incoming ):
return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
+ def __handle_tool_execute( self, trans, incoming, params, history ):
+ try:
+ rerun_remap_job_id = None
+ if 'rerun_remap_job_id' in incoming:
+ rerun_remap_job_id = trans.app.security.decode_id(incoming['rerun_remap_job_id'])
+ _, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
+ except httpexceptions.HTTPFound, e:
+ #if it's a paste redirect exception, pass it up the stack
+ raise e
+ except Exception, e:
+ log.exception('Exception caught while attempting tool execution:')
+ return 'message.mako', dict( status='error', message='Error executing tool: %s' % str(e), refresh_frames=[] )
+ try:
+ assert isinstance( out_data, odict )
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ if isinstance( out_data, str ):
+ message = out_data
+ else:
+ message = 'Failure executing tool (odict not returned from tool execution)'
+ return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
+
def __handle_state_refresh( self, trans, state, errors ):
try:
self.find_fieldstorage( state.inputs )
https://bitbucket.org/galaxy/galaxy-central/commits/d4f8824c11b1/
Changeset: d4f8824c11b1
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Don't pass API key to datatype display method.
Some datatypes cannot take the keyword parameter and none of them should need it.
Affected #: 1 file
diff -r 5541fd3245c571c1cbe7a51faa734cd6aa052d4e -r d4f8824c11b1cb868f3a0a06c3d533cad123ccee lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -269,7 +269,13 @@
hda = self.get_history_dataset_association( trans, history, history_content_id,
check_ownership=True, check_accessible=True )
- rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **kwd )
+ display_kwd = kwd.copy()
+ try:
+ del display_kwd["key"]
+ except KeyError:
+ pass
+ rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **display_kwd )
+
except Exception, exception:
log.error( "Error getting display data for dataset (%s) from history (%s): %s",
https://bitbucket.org/galaxy/galaxy-central/commits/4f7e715db123/
Changeset: 4f7e715db123
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Allow easier use of tool repeat and conditional params via the API.
For now, still using the flat format of web ui. So
{...
"a_repeat_0|a_repeat_param" : "value"
}
instead of the more ideal:
{...
a_repeat: [
{ "a_repeat_param": "value" }
]
}
To implement this, there is a new path through the tool state processing code that sets the state based on the supplied inputs instead of requiring iterative calls (one for each repeat addition for instance) like the UI or specifying a tool_state externally. To specify this path through the code, simply do not include a tool_state parameter when using the tools API.
Affected #: 2 files
diff -r d4f8824c11b1cb868f3a0a06c3d533cad123ccee -r 4f7e715db123cb3d31258fbe828854c36719af9e lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1901,12 +1901,18 @@
callback( "", input, value[input.name] )
else:
input.visit_inputs( "", value[input.name], callback )
- def handle_input( self, trans, incoming, history=None, old_errors=None ):
+ def handle_input( self, trans, incoming, history=None, old_errors=None, process_state='update' ):
"""
Process incoming parameters for this tool from the dict `incoming`,
update the tool state (or create if none existed), and either return
to the form or execute the tool (only if 'execute' was clicked and
there were no errors).
+
+ process_state can be either 'update' (to incrementally build up the state
+ over several calls - one repeat per handle for instance) or 'populate'
+ force a complete build of the state and submission all at once (like
+ from API). May want an incremental version of the API also at some point,
+ that is why this is not just called for_api.
"""
state, state_new = self.__fetch_state( trans, incoming, history )
if state_new:
@@ -1922,8 +1928,7 @@
self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
- errors, params = self.__check_param_values( trans, incoming, state, old_errors )
-
+ errors, params = self.__check_param_values( trans, incoming, state, old_errors, process_state, history=history )
if self.__should_refresh_state( incoming ):
return self.__handle_state_refresh( trans, state, errors )
else:
@@ -2001,7 +2006,7 @@
new = True
return state, new
- def __check_param_values( self, trans, incoming, state, old_errors ):
+ def __check_param_values( self, trans, incoming, state, old_errors, process_state, history ):
# Process incoming data
if not( self.check_values ):
# If `self.check_values` is false we don't do any checking or
@@ -2014,7 +2019,12 @@
else:
# Update state for all inputs on the current page taking new
# values from `incoming`.
- errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ if process_state == "update":
+ errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ elif process_state == "populate":
+ errors = self.populate_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, history )
+ else:
+ raise Exception("Unknown process_state type %s" % process_state)
# If the tool provides a `validate_input` hook, call it.
validate_input = self.get_hook( 'validate_input' )
if validate_input:
@@ -2059,6 +2069,137 @@
return 'message.mako', dict( status='error',
message='Your upload was interrupted. If this was uninentional, please retry it.',
refresh_frames=[], cont=None )
+
+ def populate_state( self, trans, inputs, state, incoming, history, prefix="", context=None ):
+ errors = dict()
+ # Push this level onto the context stack
+ context = ExpressionContext( state, context )
+ for input in inputs.itervalues():
+ key = prefix + input.name
+ if isinstance( input, Repeat ):
+ group_state = state[input.name]
+ # Create list of empty errors for each previously existing state
+ group_errors = [ ]
+ any_group_errors = False
+ rep_index = 0
+ while True:
+ rep_name = "%s_%d" % ( key, rep_index )
+ if not any( [ key.startswith(rep_name) for key in incoming.keys() ] ):
+ break
+ if rep_index < input.max:
+ new_state = {}
+ new_state['__index__'] = rep_index
+ self.fill_in_new_state( trans, input.inputs, new_state, context, history=history )
+ group_state.append( new_state )
+ group_errors.append( {} )
+ rep_errors = self.populate_state( trans,
+ input.inputs,
+ new_state,
+ incoming,
+ history,
+ prefix=rep_name + "|",
+ context=context )
+ if rep_errors:
+ any_group_errors = True
+ group_errors[rep_index].update( rep_errors )
+
+ else:
+ group_errors[-1] = { '__index__': 'Cannot add repeat (max size=%i).' % input.max }
+ any_group_errors = True
+ rep_index += 1
+ elif isinstance( input, Conditional ):
+ group_state = state[input.name]
+ group_prefix = "%s|" % ( key )
+ # Deal with the 'test' element and see if it's value changed
+ if input.value_ref and not input.value_ref_in_group:
+ # We are referencing an existent parameter, which is not
+ # part of this group
+ test_param_key = prefix + input.test_param.name
+ else:
+ test_param_key = group_prefix + input.test_param.name
+ test_param_error = None
+ test_incoming = get_incoming_value( incoming, test_param_key, None )
+
+ # Get value of test param and determine current case
+ value, test_param_error = \
+ check_param( trans, input.test_param, test_incoming, context )
+ current_case = input.get_current_case( value, trans )
+ # Current case has changed, throw away old state
+ group_state = state[input.name] = {}
+ # TODO: we should try to preserve values if we can
+ self.fill_in_new_state( trans, input.cases[current_case].inputs, group_state, context, history=history )
+ group_errors = self.populate_state( trans,
+ input.cases[current_case].inputs,
+ group_state,
+ incoming,
+ history,
+ prefix=group_prefix,
+ context=context,
+ )
+ if test_param_error:
+ group_errors[ input.test_param.name ] = test_param_error
+ if group_errors:
+ errors[ input.name ] = group_errors
+ # Store the current case in a special value
+ group_state['__current_case__'] = current_case
+ # Store the value of the test element
+ group_state[ input.test_param.name ] = value
+ elif isinstance( input, UploadDataset ):
+ group_state = state[input.name]
+ group_errors = []
+ any_group_errors = False
+ d_type = input.get_datatype( trans, context )
+ writable_files = d_type.writable_files
+ #remove extra files
+ while len( group_state ) > len( writable_files ):
+ del group_state[-1]
+ # Update state
+ max_index = -1
+ for i, rep_state in enumerate( group_state ):
+ rep_index = rep_state['__index__']
+ max_index = max( max_index, rep_index )
+ rep_prefix = "%s_%d|" % ( key, rep_index )
+ rep_errors = self.populate_state( trans,
+ input.inputs,
+ rep_state,
+ incoming,
+ history,
+ prefix=rep_prefix,
+ context=context)
+ if rep_errors:
+ any_group_errors = True
+ group_errors.append( rep_errors )
+ else:
+ group_errors.append( {} )
+ # Add new fileupload as needed
+ offset = 1
+ while len( writable_files ) > len( group_state ):
+ new_state = {}
+ new_state['__index__'] = max_index + offset
+ offset += 1
+ self.fill_in_new_state( trans, input.inputs, new_state, context )
+ group_state.append( new_state )
+ if any_group_errors:
+ group_errors.append( {} )
+ # Were there *any* errors for any repetition?
+ if any_group_errors:
+ errors[input.name] = group_errors
+ else:
+ if key not in incoming \
+ and "__force_update__" + key not in incoming:
+ # No new value provided, and we are only updating, so keep
+ # the old value (which should already be in the state) and
+ # preserve the old error message.
+ pass
+ else:
+ incoming_value = get_incoming_value( incoming, key, None )
+ value, error = check_param( trans, input, incoming_value, context )
+ # If a callback was provided, allow it to process the value
+ if error:
+ errors[ input.name ] = error
+ state[ input.name ] = value
+ return errors
+
def update_state( self, trans, inputs, state, incoming, prefix="", context=None,
update_only=False, old_errors={}, item_callback=None ):
"""
diff -r d4f8824c11b1cb868f3a0a06c3d533cad123ccee -r 4f7e715db123cb3d31258fbe828854c36719af9e lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -9,6 +9,9 @@
import logging
log = logging.getLogger( __name__ )
+DEFAULT_STATE_PROCESSING = "update" # See comment below.
+
+
class ToolsController( BaseAPIController, UsesVisualizationMixin ):
"""
RESTful controller for interactions with tools.
@@ -113,7 +116,15 @@
# TODO: encode data ids and decode ids.
# TODO: handle dbkeys
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__, history=target_history )
+ # process_state must be 'populate' or 'update'. If 'populate', fully
+ # expand repeat and conditionals when building up state, if 'update'
+ # state must be built up over several iterative calls to the API -
+ # mimicing behavior of web controller. Mimic the the web controller
+ # and modify state outright if "tool_state" is contain in input params,
+ # else "populate" the tool state from scratch using payload.
+ incoming = params.__dict__
+ process_state = "update" if "tool_state" in incoming else "populate"
+ template, vars = tool.handle_input( trans, incoming, history=target_history, process_state=process_state )
if 'errors' in vars:
trans.response.status = 400
return { "message": { "type": "error", "data" : vars[ 'errors' ] } }
https://bitbucket.org/galaxy/galaxy-central/commits/336525248012/
Changeset: 336525248012
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: When parsing tool inputs - distinguish between those coming from HTML post and those from JSON.
Most of the time they have the same value parsing rules right now, but the parsing rules for booleans would be kind of hackish if applied to JSON clients (only a list with two values is considered true). Ultimately this could simplify from_html in DataToolParameter which has grown unwieldy.
TODO: Refactor state building logic out into its own class, too many variables are being passed around over and over at this point.
Affected #: 4 files
diff -r 4f7e715db123cb3d31258fbe828854c36719af9e -r 33652524801287703e2abefecf2c676243c83b25 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1901,7 +1901,7 @@
callback( "", input, value[input.name] )
else:
input.visit_inputs( "", value[input.name], callback )
- def handle_input( self, trans, incoming, history=None, old_errors=None, process_state='update' ):
+ def handle_input( self, trans, incoming, history=None, old_errors=None, process_state='update', source='html' ):
"""
Process incoming parameters for this tool from the dict `incoming`,
update the tool state (or create if none existed), and either return
@@ -1925,10 +1925,10 @@
if not self.display_interface:
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
if len(incoming):
- self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {}, source=source )
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
- errors, params = self.__check_param_values( trans, incoming, state, old_errors, process_state, history=history )
+ errors, params = self.__check_param_values( trans, incoming, state, old_errors, process_state, history=history, source=source )
if self.__should_refresh_state( incoming ):
return self.__handle_state_refresh( trans, state, errors )
else:
@@ -2006,7 +2006,7 @@
new = True
return state, new
- def __check_param_values( self, trans, incoming, state, old_errors, process_state, history ):
+ def __check_param_values( self, trans, incoming, state, old_errors, process_state, history, source ):
# Process incoming data
if not( self.check_values ):
# If `self.check_values` is false we don't do any checking or
@@ -2020,9 +2020,9 @@
# Update state for all inputs on the current page taking new
# values from `incoming`.
if process_state == "update":
- errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {}, source=source )
elif process_state == "populate":
- errors = self.populate_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, history )
+ errors = self.populate_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, history, source=source )
else:
raise Exception("Unknown process_state type %s" % process_state)
# If the tool provides a `validate_input` hook, call it.
@@ -2070,7 +2070,7 @@
message='Your upload was interrupted. If this was uninentional, please retry it.',
refresh_frames=[], cont=None )
- def populate_state( self, trans, inputs, state, incoming, history, prefix="", context=None ):
+ def populate_state( self, trans, inputs, state, incoming, history, source, prefix="", context=None ):
errors = dict()
# Push this level onto the context stack
context = ExpressionContext( state, context )
@@ -2097,6 +2097,7 @@
new_state,
incoming,
history,
+ source,
prefix=rep_name + "|",
context=context )
if rep_errors:
@@ -2122,7 +2123,7 @@
# Get value of test param and determine current case
value, test_param_error = \
- check_param( trans, input.test_param, test_incoming, context )
+ check_param( trans, input.test_param, test_incoming, context, source=source )
current_case = input.get_current_case( value, trans )
# Current case has changed, throw away old state
group_state = state[input.name] = {}
@@ -2133,6 +2134,7 @@
group_state,
incoming,
history,
+ source,
prefix=group_prefix,
context=context,
)
@@ -2164,6 +2166,7 @@
rep_state,
incoming,
history,
+ source,
prefix=rep_prefix,
context=context)
if rep_errors:
@@ -2193,14 +2196,14 @@
pass
else:
incoming_value = get_incoming_value( incoming, key, None )
- value, error = check_param( trans, input, incoming_value, context )
+ value, error = check_param( trans, input, incoming_value, context, source=source )
# If a callback was provided, allow it to process the value
if error:
errors[ input.name ] = error
state[ input.name ] = value
return errors
- def update_state( self, trans, inputs, state, incoming, prefix="", context=None,
+ def update_state( self, trans, inputs, state, incoming, source='html', prefix="", context=None,
update_only=False, old_errors={}, item_callback=None ):
"""
Update the tool state in `state` using the user input in `incoming`.
@@ -2258,6 +2261,7 @@
input.inputs,
rep_state,
incoming,
+ source=source,
prefix=rep_prefix,
context=context,
update_only=update_only,
@@ -2306,7 +2310,7 @@
else:
# Get value of test param and determine current case
value, test_param_error = \
- check_param( trans, input.test_param, test_incoming, context )
+ check_param( trans, input.test_param, test_incoming, context, source=source )
current_case = input.get_current_case( value, trans )
if current_case != old_current_case:
# Current case has changed, throw away old state
@@ -2323,6 +2327,7 @@
incoming,
prefix=group_prefix,
context=context,
+ source=source,
update_only=update_only,
old_errors=group_old_errors,
item_callback=item_callback )
@@ -2364,6 +2369,7 @@
incoming,
prefix=rep_prefix,
context=context,
+ source=source,
update_only=update_only,
old_errors=rep_old_errors,
item_callback=item_callback )
@@ -2396,7 +2402,7 @@
errors[ input.name ] = old_errors[ input.name ]
else:
incoming_value = get_incoming_value( incoming, key, None )
- value, error = check_param( trans, input, incoming_value, context )
+ value, error = check_param( trans, input, incoming_value, context, source=source )
# If a callback was provided, allow it to process the value
if item_callback:
old_value = state.get( input.name, None )
diff -r 4f7e715db123cb3d31258fbe828854c36719af9e -r 33652524801287703e2abefecf2c676243c83b25 lib/galaxy/tools/parameters/__init__.py
--- a/lib/galaxy/tools/parameters/__init__.py
+++ b/lib/galaxy/tools/parameters/__init__.py
@@ -40,7 +40,7 @@
if new_value:
input_values[input.name] = new_value
-def check_param( trans, param, incoming_value, param_values ):
+def check_param( trans, param, incoming_value, param_values, source='html' ):
"""
Check the value of a single parameter `param`. The value in
`incoming_value` is converted from its HTML encoding and validated.
@@ -53,7 +53,10 @@
try:
if value is not None or isinstance(param, DataToolParameter):
# Convert value from HTML representation
- value = param.from_html( value, trans, param_values )
+ if source == 'html':
+ value = param.from_html( value, trans, param_values )
+ else:
+ value = param.from_json( value, trans, param_values )
# Allow the value to be converted if neccesary
filtered_value = param.filter_value( value, trans, param_values )
# Then do any further validation on the value
diff -r 4f7e715db123cb3d31258fbe828854c36719af9e -r 33652524801287703e2abefecf2c676243c83b25 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -81,6 +81,9 @@
"""
return value
+ def from_json( self, value, trans=None, other_values={} ):
+ return self.from_html( value, trans, other_values )
+
def get_initial_value( self, trans, context, history=None ):
"""
Return the starting value of the parameter
@@ -427,6 +430,9 @@
def from_html( self, value, trans=None, other_values={} ):
return form_builder.CheckboxField.is_checked( value )
+ def from_json( self, value, trans=None, other_values={} ):
+ return string_as_bool( value )
+
def to_html_value( self, value, app ):
if value:
return [ 'true', 'true' ]
diff -r 4f7e715db123cb3d31258fbe828854c36719af9e -r 33652524801287703e2abefecf2c676243c83b25 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -124,7 +124,7 @@
# else "populate" the tool state from scratch using payload.
incoming = params.__dict__
process_state = "update" if "tool_state" in incoming else "populate"
- template, vars = tool.handle_input( trans, incoming, history=target_history, process_state=process_state )
+ template, vars = tool.handle_input( trans, incoming, history=target_history, process_state=process_state, source="json" )
if 'errors' in vars:
trans.response.status = 400
return { "message": { "type": "error", "data" : vars[ 'errors' ] } }
https://bitbucket.org/galaxy/galaxy-central/commits/9e503de59f9b/
Changeset: 9e503de59f9b
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Allow raw display of data via API.
Matching functionality available with sessions at /display in root controller. The test framework uses this functionality in root controller to read test result data so an API driven approach to testing will need to have similar functionality available. I am not sure I am entirely comfortable with serving raw data this way, but I guess it is no worse than the existing /display route. This is marked as beta and there is a note that it may be dropped at some point in the future.
Affected #: 1 file
diff -r 33652524801287703e2abefecf2c676243c83b25 -r 9e503de59f9b939e81cfa26645d7595d3c8a5df8 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -7,7 +7,7 @@
from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.web.framework.helpers import is_true
from galaxy.datatypes import dataproviders
-
+from galaxy.util import string_as_bool_or_none
import logging
log = logging.getLogger( __name__ )
@@ -245,11 +245,16 @@
@web.expose_api_raw_anonymous
def display( self, trans, history_content_id, history_id,
- preview=False, filename=None, to_ext=None, chunk=None, **kwd ):
+ preview=False, filename=None, to_ext=None, chunk=None, raw=False, **kwd ):
"""
GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/display
Displays history content (dataset).
+
+ The query parameter 'raw' should be considered experimental and may be dropped at
+ some point in the future without warning. Generally, data should be processed by its
+ datatype prior to display (the defult if raw is unspecified or explicitly false.
"""
+ raw = string_as_bool_or_none( raw )
# Huge amount of code overlap with lib/galaxy/webapps/galaxy/api/history_content:show here.
rval = ''
try:
@@ -274,8 +279,10 @@
del display_kwd["key"]
except KeyError:
pass
- rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **display_kwd )
-
+ if raw:
+ rval = open( hda.file_name )
+ else:
+ rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **display_kwd )
except Exception, exception:
log.error( "Error getting display data for dataset (%s) from history (%s): %s",
https://bitbucket.org/galaxy/galaxy-central/commits/468fe654a360/
Changeset: 468fe654a360
User: jmchilton
Date: 2013-11-11 08:45:32
Summary: Fix specifying dbkeys with API uploads.
Essentially a redo of changeset 83103cff8757. In order to fix uploads when no history was specified, the only available dbkey allowed was '?'. As a result of that changeset, when uploading files via the API users could only specify '?'. Now the stock dbbuilds are also available - though builds in the history are still not available (TODO) and user custom builds may not be available (TODO check this).
Affected #: 2 files
diff -r 9e503de59f9b939e81cfa26645d7595d3c8a5df8 -r 468fe654a360f41e5f20f40d2b1461a074076969 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1023,16 +1023,13 @@
self.static_options = [ ( value, key, False ) for key, value in util.dbnames ]
def get_options( self, trans, other_values ):
- if not trans.history:
- yield 'unspecified', '?', False
- else:
+ last_used_build = object()
+ if trans.history:
last_used_build = trans.history.genome_build
- for dbkey, build_name in trans.db_builds:
- yield build_name, dbkey, ( dbkey == last_used_build )
+ for dbkey, build_name in trans.db_builds:
+ yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- if not trans.history:
- return set( '?' )
return set( dbkey for dbkey, _ in trans.db_builds )
def to_dict( self, trans, view='collection', value_mapper=None ):
diff -r 9e503de59f9b939e81cfa26645d7595d3c8a5df8 -r 468fe654a360f41e5f20f40d2b1461a074076969 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -1070,10 +1070,11 @@
the user (chromInfo in history).
"""
dbnames = list()
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for dataset in datasets:
- dbnames.append( (dataset.dbkey, dataset.name) )
+ if self.history:
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
user = self.get_user()
if user and 'dbkeys' in user.preferences:
user_keys = from_json_string( user.preferences['dbkeys'] )
https://bitbucket.org/galaxy/galaxy-central/commits/2769dec77197/
Changeset: 2769dec77197
User: jmchilton
Date: 2013-11-14 19:32:24
Summary: Clean up 4f7e715 slightly.
Affected #: 1 file
diff -r 468fe654a360f41e5f20f40d2b1461a074076969 -r 2769dec771976d29ac5904bff55fa448672d8bd6 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -9,8 +9,6 @@
import logging
log = logging.getLogger( __name__ )
-DEFAULT_STATE_PROCESSING = "update" # See comment below.
-
class ToolsController( BaseAPIController, UsesVisualizationMixin ):
"""
@@ -116,12 +114,16 @@
# TODO: encode data ids and decode ids.
# TODO: handle dbkeys
params = util.Params( inputs, sanitize = False )
- # process_state must be 'populate' or 'update'. If 'populate', fully
- # expand repeat and conditionals when building up state, if 'update'
- # state must be built up over several iterative calls to the API -
- # mimicing behavior of web controller. Mimic the the web controller
- # and modify state outright if "tool_state" is contain in input params,
- # else "populate" the tool state from scratch using payload.
+ # process_state will be 'populate' or 'update'. When no tool
+ # state is specified in input - it will be 'populate', and
+ # tool will fully expand repeat and conditionals when building
+ # up state. If tool state is found in input
+ # parameters,process_state will be 'update' and complex
+ # submissions (with repeats and conditionals) must be built up
+ # over several iterative calls to the API - mimicing behavior
+ # of web controller (though frankly API never returns
+ # tool_state so this "legacy" behavior is probably impossible
+ # through API currently).
incoming = params.__dict__
process_state = "update" if "tool_state" in incoming else "populate"
template, vars = tool.handle_input( trans, incoming, history=target_history, process_state=process_state, source="json" )
https://bitbucket.org/galaxy/galaxy-central/commits/3402a11b0088/
Changeset: 3402a11b0088
User: jmchilton
Date: 2013-11-14 19:34:05
Summary: Merged in jmchilton/galaxy-central-fork-1 (pull request #256)
API Enhancements Required for API Driven Tool Testing
Affected #: 9 files
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1901,20 +1901,21 @@
callback( "", input, value[input.name] )
else:
input.visit_inputs( "", value[input.name], callback )
- def handle_input( self, trans, incoming, history=None, old_errors=None ):
+ def handle_input( self, trans, incoming, history=None, old_errors=None, process_state='update', source='html' ):
"""
Process incoming parameters for this tool from the dict `incoming`,
update the tool state (or create if none existed), and either return
to the form or execute the tool (only if 'execute' was clicked and
there were no errors).
+
+ process_state can be either 'update' (to incrementally build up the state
+ over several calls - one repeat per handle for instance) or 'populate'
+ force a complete build of the state and submission all at once (like
+ from API). May want an incremental version of the API also at some point,
+ that is why this is not just called for_api.
"""
- # Get the state or create if not found
- if "tool_state" in incoming:
- encoded_state = string_to_object( incoming["tool_state"] )
- state = DefaultToolState()
- state.decode( encoded_state, self, trans.app )
- else:
- state = self.new_state( trans, history=history )
+ state, state_new = self.__fetch_state( trans, incoming, history )
+ if state_new:
# This feels a bit like a hack. It allows forcing full processing
# of inputs even when there is no state in the incoming dictionary
# by providing either 'runtool_btn' (the name of the submit button
@@ -1924,8 +1925,88 @@
if not self.display_interface:
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
if len(incoming):
- self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {}, source=source )
return "tool_form.mako", dict( errors={}, tool_state=state, param_values={}, incoming={} )
+
+ errors, params = self.__check_param_values( trans, incoming, state, old_errors, process_state, history=history, source=source )
+ if self.__should_refresh_state( incoming ):
+ return self.__handle_state_refresh( trans, state, errors )
+ else:
+ # User actually clicked next or execute.
+
+ # If there were errors, we stay on the same page and display
+ # error messages
+ if errors:
+ error_message = "One or more errors were found in the input you provided. The specific errors are marked below."
+ return "tool_form.mako", dict( errors=errors, tool_state=state, incoming=incoming, error_message=error_message )
+ # If we've completed the last page we can execute the tool
+ elif state.page == self.last_page:
+ return self.__handle_tool_execute( trans, incoming, params, history )
+ # Otherwise move on to the next page
+ else:
+ return self.__handle_page_advance( trans, state, errors )
+
+ def __should_refresh_state( self, incoming ):
+ return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
+
+ def __handle_tool_execute( self, trans, incoming, params, history ):
+ try:
+ rerun_remap_job_id = None
+ if 'rerun_remap_job_id' in incoming:
+ rerun_remap_job_id = trans.app.security.decode_id(incoming['rerun_remap_job_id'])
+ _, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
+ except httpexceptions.HTTPFound, e:
+ #if it's a paste redirect exception, pass it up the stack
+ raise e
+ except Exception, e:
+ log.exception('Exception caught while attempting tool execution:')
+ return 'message.mako', dict( status='error', message='Error executing tool: %s' % str(e), refresh_frames=[] )
+ try:
+ assert isinstance( out_data, odict )
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ if isinstance( out_data, str ):
+ message = out_data
+ else:
+ message = 'Failure executing tool (odict not returned from tool execution)'
+ return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
+
+ def __handle_state_refresh( self, trans, state, errors ):
+ try:
+ self.find_fieldstorage( state.inputs )
+ except InterruptedUpload:
+ # If inputs contain a file it won't persist. Most likely this
+ # is an interrupted upload. We should probably find a more
+ # standard method of determining an incomplete POST.
+ return self.handle_interrupted( trans, state.inputs )
+ except:
+ pass
+ # Just a refresh, render the form with updated state and errors.
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+
+ def __handle_page_advance( self, trans, state, errors ):
+ state.page += 1
+ # Fill in the default values for the next page
+ self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
+ if not self.display_interface:
+ return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+
+ def __fetch_state( self, trans, incoming, history ):
+ # Get the state or create if not found
+ if "tool_state" in incoming:
+ encoded_state = string_to_object( incoming["tool_state"] )
+ state = DefaultToolState()
+ state.decode( encoded_state, self, trans.app )
+ new = False
+ else:
+ state = self.new_state( trans, history=history )
+ new = True
+ return state, new
+
+ def __check_param_values( self, trans, incoming, state, old_errors, process_state, history, source ):
# Process incoming data
if not( self.check_values ):
# If `self.check_values` is false we don't do any checking or
@@ -1938,64 +2019,19 @@
else:
# Update state for all inputs on the current page taking new
# values from `incoming`.
- errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
+ if process_state == "update":
+ errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {}, source=source )
+ elif process_state == "populate":
+ errors = self.populate_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, history, source=source )
+ else:
+ raise Exception("Unknown process_state type %s" % process_state)
# If the tool provides a `validate_input` hook, call it.
validate_input = self.get_hook( 'validate_input' )
if validate_input:
validate_input( trans, errors, state.inputs, self.inputs_by_page[state.page] )
params = state.inputs
- # Did the user actually click next / execute or is this just
- # a refresh?
- if 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming:
- # If there were errors, we stay on the same page and display
- # error messages
- if errors:
- error_message = "One or more errors were found in the input you provided. The specific errors are marked below."
- return "tool_form.mako", dict( errors=errors, tool_state=state, incoming=incoming, error_message=error_message )
- # If we've completed the last page we can execute the tool
- elif state.page == self.last_page:
- try:
- rerun_remap_job_id = None
- if 'rerun_remap_job_id' in incoming:
- rerun_remap_job_id = trans.app.security.decode_id(incoming['rerun_remap_job_id'])
- _, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
- except httpexceptions.HTTPFound, e:
- #if it's a paste redirect exception, pass it up the stack
- raise e
- except Exception, e:
- log.exception('Exception caught while attempting tool execution:')
- return 'message.mako', dict( status='error', message='Error executing tool: %s' % str(e), refresh_frames=[] )
- try:
- assert isinstance( out_data, odict )
- return 'tool_executed.mako', dict( out_data=out_data )
- except:
- if isinstance( out_data, str ):
- message = out_data
- else:
- message = 'Failure executing tool (odict not returned from tool execution)'
- return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
- # Otherwise move on to the next page
- else:
- state.page += 1
- # Fill in the default values for the next page
- self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
- if not self.display_interface:
- return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
- else:
- try:
- self.find_fieldstorage( state.inputs )
- except InterruptedUpload:
- # If inputs contain a file it won't persist. Most likely this
- # is an interrupted upload. We should probably find a more
- # standard method of determining an incomplete POST.
- return self.handle_interrupted( trans, state.inputs )
- except:
- pass
- # Just a refresh, render the form with updated state and errors.
- if not self.display_interface:
- return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ return errors, params
+
def find_fieldstorage( self, x ):
if isinstance( x, FieldStorage ):
raise InterruptedUpload( None )
@@ -2033,7 +2069,141 @@
return 'message.mako', dict( status='error',
message='Your upload was interrupted. If this was unintentional, please retry it.',
refresh_frames=[], cont=None )
- def update_state( self, trans, inputs, state, incoming, prefix="", context=None,
+
+ def populate_state( self, trans, inputs, state, incoming, history, source, prefix="", context=None ):
+ errors = dict()
+ # Push this level onto the context stack
+ context = ExpressionContext( state, context )
+ for input in inputs.itervalues():
+ key = prefix + input.name
+ if isinstance( input, Repeat ):
+ group_state = state[input.name]
+ # Create list of empty errors for each previously existing state
+ group_errors = [ ]
+ any_group_errors = False
+ rep_index = 0
+ while True:
+ rep_name = "%s_%d" % ( key, rep_index )
+ if not any( [ key.startswith(rep_name) for key in incoming.keys() ] ):
+ break
+ if rep_index < input.max:
+ new_state = {}
+ new_state['__index__'] = rep_index
+ self.fill_in_new_state( trans, input.inputs, new_state, context, history=history )
+ group_state.append( new_state )
+ group_errors.append( {} )
+ rep_errors = self.populate_state( trans,
+ input.inputs,
+ new_state,
+ incoming,
+ history,
+ source,
+ prefix=rep_name + "|",
+ context=context )
+ if rep_errors:
+ any_group_errors = True
+ group_errors[rep_index].update( rep_errors )
+
+ else:
+ group_errors[-1] = { '__index__': 'Cannot add repeat (max size=%i).' % input.max }
+ any_group_errors = True
+ rep_index += 1
+ elif isinstance( input, Conditional ):
+ group_state = state[input.name]
+ group_prefix = "%s|" % ( key )
+ # Deal with the 'test' element and see if it's value changed
+ if input.value_ref and not input.value_ref_in_group:
+ # We are referencing an existent parameter, which is not
+ # part of this group
+ test_param_key = prefix + input.test_param.name
+ else:
+ test_param_key = group_prefix + input.test_param.name
+ test_param_error = None
+ test_incoming = get_incoming_value( incoming, test_param_key, None )
+
+ # Get value of test param and determine current case
+ value, test_param_error = \
+ check_param( trans, input.test_param, test_incoming, context, source=source )
+ current_case = input.get_current_case( value, trans )
+ # Current case has changed, throw away old state
+ group_state = state[input.name] = {}
+ # TODO: we should try to preserve values if we can
+ self.fill_in_new_state( trans, input.cases[current_case].inputs, group_state, context, history=history )
+ group_errors = self.populate_state( trans,
+ input.cases[current_case].inputs,
+ group_state,
+ incoming,
+ history,
+ source,
+ prefix=group_prefix,
+ context=context,
+ )
+ if test_param_error:
+ group_errors[ input.test_param.name ] = test_param_error
+ if group_errors:
+ errors[ input.name ] = group_errors
+ # Store the current case in a special value
+ group_state['__current_case__'] = current_case
+ # Store the value of the test element
+ group_state[ input.test_param.name ] = value
+ elif isinstance( input, UploadDataset ):
+ group_state = state[input.name]
+ group_errors = []
+ any_group_errors = False
+ d_type = input.get_datatype( trans, context )
+ writable_files = d_type.writable_files
+ #remove extra files
+ while len( group_state ) > len( writable_files ):
+ del group_state[-1]
+ # Update state
+ max_index = -1
+ for i, rep_state in enumerate( group_state ):
+ rep_index = rep_state['__index__']
+ max_index = max( max_index, rep_index )
+ rep_prefix = "%s_%d|" % ( key, rep_index )
+ rep_errors = self.populate_state( trans,
+ input.inputs,
+ rep_state,
+ incoming,
+ history,
+ source,
+ prefix=rep_prefix,
+ context=context)
+ if rep_errors:
+ any_group_errors = True
+ group_errors.append( rep_errors )
+ else:
+ group_errors.append( {} )
+ # Add new fileupload as needed
+ offset = 1
+ while len( writable_files ) > len( group_state ):
+ new_state = {}
+ new_state['__index__'] = max_index + offset
+ offset += 1
+ self.fill_in_new_state( trans, input.inputs, new_state, context )
+ group_state.append( new_state )
+ if any_group_errors:
+ group_errors.append( {} )
+ # Were there *any* errors for any repetition?
+ if any_group_errors:
+ errors[input.name] = group_errors
+ else:
+ if key not in incoming \
+ and "__force_update__" + key not in incoming:
+ # No new value provided, and we are only updating, so keep
+ # the old value (which should already be in the state) and
+ # preserve the old error message.
+ pass
+ else:
+ incoming_value = get_incoming_value( incoming, key, None )
+ value, error = check_param( trans, input, incoming_value, context, source=source )
+ # If a callback was provided, allow it to process the value
+ if error:
+ errors[ input.name ] = error
+ state[ input.name ] = value
+ return errors
+
+ def update_state( self, trans, inputs, state, incoming, source='html', prefix="", context=None,
update_only=False, old_errors={}, item_callback=None ):
"""
Update the tool state in `state` using the user input in `incoming`.
@@ -2091,6 +2261,7 @@
input.inputs,
rep_state,
incoming,
+ source=source,
prefix=rep_prefix,
context=context,
update_only=update_only,
@@ -2139,7 +2310,7 @@
else:
# Get value of test param and determine current case
value, test_param_error = \
- check_param( trans, input.test_param, test_incoming, context )
+ check_param( trans, input.test_param, test_incoming, context, source=source )
current_case = input.get_current_case( value, trans )
if current_case != old_current_case:
# Current case has changed, throw away old state
@@ -2156,6 +2327,7 @@
incoming,
prefix=group_prefix,
context=context,
+ source=source,
update_only=update_only,
old_errors=group_old_errors,
item_callback=item_callback )
@@ -2197,6 +2369,7 @@
incoming,
prefix=rep_prefix,
context=context,
+ source=source,
update_only=update_only,
old_errors=rep_old_errors,
item_callback=item_callback )
@@ -2229,7 +2402,7 @@
errors[ input.name ] = old_errors[ input.name ]
else:
incoming_value = get_incoming_value( incoming, key, None )
- value, error = check_param( trans, input, incoming_value, context )
+ value, error = check_param( trans, input, incoming_value, context, source=source )
# If a callback was provided, allow it to process the value
if item_callback:
old_value = state.get( input.name, None )
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -4,8 +4,10 @@
import logging
log = logging.getLogger( __name__ )
+
class UploadToolAction( ToolAction ):
- def execute( self, tool, trans, incoming={}, set_output_hid = True, history=None, **kwargs ):
+
+ def execute( self, tool, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
if input.type == "upload_dataset":
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -1,4 +1,8 @@
-import os, tempfile, StringIO, pwd, subprocess
+import os
+import tempfile
+import StringIO
+import pwd
+import subprocess
from cgi import FieldStorage
from galaxy import datatypes, util
from galaxy.util.odict import odict
@@ -10,13 +14,13 @@
import logging
log = logging.getLogger( __name__ )
+
def persist_uploads( params ):
"""
Turn any uploads in the submitted form to persisted files.
"""
if 'files' in params:
new_files = []
- temp_files = []
for upload_dataset in params['files']:
f = upload_dataset['file_data']
if isinstance( f, FieldStorage ):
@@ -24,8 +28,8 @@
assert f.file.name != '<fdopen>'
local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
f.file.close()
- upload_dataset['file_data'] = dict( filename = f.filename,
- local_filename = local_filename )
+ upload_dataset['file_data'] = dict( filename=f.filename,
+ local_filename=local_filename )
elif type( f ) == dict and 'filename' and 'local_filename' not in f:
raise Exception( 'Uploaded file was encoded in a way not understood by Galaxy.' )
if upload_dataset['url_paste'] and upload_dataset['url_paste'].strip() != '':
@@ -35,6 +39,8 @@
new_files.append( upload_dataset )
params['files'] = new_files
return params
+
+
def handle_library_params( trans, params, folder_id, replace_dataset=None ):
# FIXME: the received params has already been parsed by util.Params() by the time it reaches here,
# so no complex objects remain. This is not good because it does not allow for those objects to be
@@ -62,6 +68,8 @@
role = trans.sa_session.query( trans.app.model.Role ).get( role_id )
library_bunch.roles.append( role )
return library_bunch
+
+
def get_precreated_datasets( trans, params, data_obj, controller='root' ):
"""
Get any precreated datasets (when using asynchronous uploads).
@@ -90,6 +98,8 @@
else:
rval.append( data )
return rval
+
+
def get_precreated_dataset( precreated_datasets, name ):
"""
Return a dataset matching a name from the list of precreated (via async
@@ -101,21 +111,24 @@
return precreated_datasets.pop( names.index( name ) )
else:
return None
+
+
def cleanup_unused_precreated_datasets( precreated_datasets ):
for data in precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
+
def __new_history_upload( trans, uploaded_dataset, history=None, state=None ):
if not history:
history = trans.history
- hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
- extension = uploaded_dataset.file_type,
- dbkey = uploaded_dataset.dbkey,
- history = history,
- create_dataset = True,
- sa_session = trans.sa_session )
+ hda = trans.app.model.HistoryDatasetAssociation( name=uploaded_dataset.name,
+ extension=uploaded_dataset.file_type,
+ dbkey=uploaded_dataset.dbkey,
+ history=history,
+ create_dataset=True,
+ sa_session=trans.sa_session )
if state:
hda.state = state
else:
@@ -128,6 +141,7 @@
trans.sa_session.flush()
return hda
+
def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
current_user_roles = trans.get_current_user_roles()
if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
@@ -156,13 +170,13 @@
trans.sa_session.add( ld )
trans.sa_session.flush()
trans.app.security_agent.copy_library_permissions( trans, folder, ld )
- ldda = trans.app.model.LibraryDatasetDatasetAssociation( name = uploaded_dataset.name,
- extension = uploaded_dataset.file_type,
- dbkey = uploaded_dataset.dbkey,
- library_dataset = ld,
- user = trans.user,
- create_dataset = True,
- sa_session = trans.sa_session )
+ ldda = trans.app.model.LibraryDatasetDatasetAssociation( name=uploaded_dataset.name,
+ extension=uploaded_dataset.file_type,
+ dbkey=uploaded_dataset.dbkey,
+ library_dataset=ld,
+ user=trans.user,
+ create_dataset=True,
+ sa_session=trans.sa_session )
trans.sa_session.add( ldda )
if state:
ldda.state = state
@@ -210,12 +224,14 @@
trans.sa_session.flush()
return ldda
+
def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ):
if library_bunch:
return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
else:
return __new_history_upload( trans, uploaded_dataset, history=history, state=state )
+
def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ):
uploaded_datasets = []
for dataset_upload_input in dataset_upload_inputs:
@@ -256,6 +272,8 @@
history.genome_build = uploaded_dataset.dbkey
uploaded_dataset.data = data
return uploaded_datasets
+
+
def create_paramfile( trans, uploaded_datasets ):
"""
Create the upload tool's JSON "param" file.
@@ -284,14 +302,14 @@
setattr( data.metadata, meta_name, meta_value )
trans.sa_session.add( data )
trans.sa_session.flush()
- json = dict( file_type = uploaded_dataset.file_type,
- dataset_id = data.dataset.id,
- dbkey = uploaded_dataset.dbkey,
- type = uploaded_dataset.type,
- metadata = uploaded_dataset.metadata,
- primary_file = uploaded_dataset.primary_file,
- composite_file_paths = uploaded_dataset.composite_files,
- composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ json = dict( file_type=uploaded_dataset.file_type,
+ dataset_id=data.dataset.id,
+ dbkey=uploaded_dataset.dbkey,
+ type=uploaded_dataset.type,
+ metadata=uploaded_dataset.metadata,
+ primary_file=uploaded_dataset.primary_file,
+ composite_file_paths=uploaded_dataset.composite_files,
+ composite_files=dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
else:
try:
is_binary = uploaded_dataset.datatype.is_binary
@@ -305,18 +323,18 @@
uuid_str = uploaded_dataset.uuid
except:
uuid_str = None
- json = dict( file_type = uploaded_dataset.file_type,
- ext = uploaded_dataset.ext,
- name = uploaded_dataset.name,
- dataset_id = data.dataset.id,
- dbkey = uploaded_dataset.dbkey,
- type = uploaded_dataset.type,
- is_binary = is_binary,
- link_data_only = link_data_only,
- uuid = uuid_str,
- space_to_tab = uploaded_dataset.space_to_tab,
- in_place = trans.app.config.external_chown_script is None,
- path = uploaded_dataset.path )
+ json = dict( file_type=uploaded_dataset.file_type,
+ ext=uploaded_dataset.ext,
+ name=uploaded_dataset.name,
+ dataset_id=data.dataset.id,
+ dbkey=uploaded_dataset.dbkey,
+ type=uploaded_dataset.type,
+ is_binary=is_binary,
+ link_data_only=link_data_only,
+ uuid=uuid_str,
+ space_to_tab=uploaded_dataset.space_to_tab,
+ in_place=trans.app.config.external_chown_script is None,
+ path=uploaded_dataset.path )
# TODO: This will have to change when we start bundling inputs.
# Also, in_place above causes the file to be left behind since the
# user cannot remove it unless the parent directory is writable.
@@ -327,6 +345,8 @@
if trans.app.config.external_chown_script:
_chown( json_file_path )
return json_file_path
+
+
def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None ):
"""
Create the upload job.
@@ -383,6 +403,8 @@
for i, v in enumerate( data_list ):
output[ 'output%i' % i ] = v
return job, output
+
+
def active_folders( trans, folder ):
# Stolen from galaxy.web.controllers.library_common (importing from which causes a circular issues).
# Much faster way of retrieving all active sub-folders within a given folder than the
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/tools/parameters/__init__.py
--- a/lib/galaxy/tools/parameters/__init__.py
+++ b/lib/galaxy/tools/parameters/__init__.py
@@ -40,7 +40,7 @@
if new_value:
input_values[input.name] = new_value
-def check_param( trans, param, incoming_value, param_values ):
+def check_param( trans, param, incoming_value, param_values, source='html' ):
"""
Check the value of a single parameter `param`. The value in
`incoming_value` is converted from its HTML encoding and validated.
@@ -53,7 +53,10 @@
try:
if value is not None or isinstance(param, DataToolParameter):
# Convert value from HTML representation
- value = param.from_html( value, trans, param_values )
+ if source == 'html':
+ value = param.from_html( value, trans, param_values )
+ else:
+ value = param.from_json( value, trans, param_values )
# Allow the value to be converted if necessary
filtered_value = param.filter_value( value, trans, param_values )
# Then do any further validation on the value
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2,14 +2,20 @@
Basic tool parameters.
"""
-import logging, string, sys, os, os.path, urllib
+import logging
+import string
+import sys
+import os
+import os.path
+import urllib
from elementtree.ElementTree import XML, Element
from galaxy import config, datatypes, util
from galaxy.web import form_builder
from galaxy.util.bunch import Bunch
from galaxy.util import string_as_bool, sanitize_param, unicodify
from sanitize import ToolParameterSanitizer
-import validation, dynamic_options
+import validation
+import dynamic_options
# For BaseURLToolParameter
from galaxy.web import url_for
from galaxy.model.item_attrs import Dictifiable
@@ -53,8 +59,10 @@
def get_label( self ):
"""Return user friendly name for the parameter"""
- if self.label: return self.label
- else: return self.name
+ if self.label:
+ return self.label
+ else:
+ return self.name
def get_html_field( self, trans=None, value=None, other_values={} ):
raise TypeError( "Abstract Method" )
@@ -73,6 +81,9 @@
"""
return value
+ def from_json( self, value, trans=None, other_values={} ):
+ return self.from_html( value, trans, other_values )
+
def get_initial_value( self, trans, context, history=None ):
"""
Return the starting value of the parameter
@@ -87,7 +98,7 @@
if a value has already been chosen from the history. This is to support the capability to
choose each dataset once
"""
- return self.get_initial_value(trans, context, history=history);
+ return self.get_initial_value(trans, context, history=history)
def get_required_enctype( self ):
"""
@@ -166,7 +177,7 @@
return value
def validate( self, value, history=None ):
- if value=="" and self.optional:
+ if value == "" and self.optional:
return
for validator in self.validators:
validator.validate( value, history )
@@ -219,7 +230,8 @@
self.area = string_as_bool( elem.get( 'area', False ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- if value is None: value = self.value
+ if value is None:
+ value = self.value
if self.area:
return form_builder.TextArea( self.name, self.size, value )
else:
@@ -228,6 +240,7 @@
def get_initial_value( self, trans, context, history=None ):
return self.value
+
class IntegerToolParameter( TextToolParameter ):
"""
Parameter that takes an integer value.
@@ -412,11 +425,14 @@
checked = self.checked
if value is not None:
checked = form_builder.CheckboxField.is_checked( value )
- return form_builder.CheckboxField( self.name, checked, refresh_on_change = self.refresh_on_change )
+ return form_builder.CheckboxField( self.name, checked, refresh_on_change=self.refresh_on_change )
def from_html( self, value, trans=None, other_values={} ):
return form_builder.CheckboxField.is_checked( value )
+ def from_json( self, value, trans=None, other_values={} ):
+ return string_as_bool( value )
+
def to_html_value( self, value, app ):
if value:
return [ 'true', 'true' ]
@@ -461,7 +477,7 @@
self.ajax = string_as_bool( elem.get( 'ajax-upload' ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- return form_builder.FileField( self.name, ajax = self.ajax, value = value )
+ return form_builder.FileField( self.name, ajax=self.ajax, value=value )
def from_html( self, value, trans=None, other_values={} ):
# Middleware or proxies may encode files in special ways (TODO: this
@@ -476,8 +492,8 @@
assert local_filename.startswith( upload_store ), \
"Filename provided by nginx is not in correct directory"
value = dict(
- filename = value["name"],
- local_filename = local_filename
+ filename=value["name"],
+ local_filename=local_filename
)
return value
@@ -533,7 +549,7 @@
user_ftp_dir = None
else:
user_ftp_dir = trans.user_ftp_dir
- return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value = value )
+ return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value=value )
def from_html( self, value, trans=None, other_values={} ):
try:
@@ -754,8 +770,9 @@
else:
return form_builder.TextField( self.name, value=(value or "") )
if value is not None:
- if not isinstance( value, list ): value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
+ if not isinstance( value, list ):
+ value = [ value ]
+ field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
options = self.get_options( trans, context )
for text, optval, selected in options:
if isinstance( optval, UnvalidatedValue ):
@@ -793,7 +810,7 @@
rval.append( v )
return rval
else:
- value_is_none = ( value == "None" and "None" not in legal_values )
+ value_is_none = ( value == "None" and "None" not in legal_values )
if value_is_none:
if self.multiple:
if self.optional:
@@ -943,7 +960,7 @@
options = []
try:
options = self.get_options( trans, {} )
- except AssertionError, assertion:
+ except AssertionError:
# we dont/cant set other_values (the {} above), so params that require other params to be filled will error:
# required dependency in filter_options
# associated DataToolParam in get_column_list
@@ -1006,16 +1023,13 @@
self.static_options = [ ( value, key, False ) for key, value in util.dbnames ]
def get_options( self, trans, other_values ):
- if not trans.history:
- yield 'unspecified', '?', False
- else:
+ last_used_build = object()
+ if trans.history:
last_used_build = trans.history.genome_build
- for dbkey, build_name in trans.db_builds:
- yield build_name, dbkey, ( dbkey == last_used_build )
+ for dbkey, build_name in trans.db_builds:
+ yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- if not trans.history:
- return set( '?' )
return set( dbkey for dbkey, _ in trans.db_builds )
def to_dict( self, trans, view='collection', value_mapper=None ):
@@ -1348,7 +1362,7 @@
options = []
for filter_key, filter_value in self.filtered.iteritems():
dataset = other_values[filter_key]
- if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ): #this is a bad way to check for this, but problems importing class ( due to circular imports? )
+ if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ): # this is a bad way to check for this, but problems importing class ( due to circular imports? )
dataset = dataset.dataset
if dataset:
for meta_key, meta_dict in filter_value.iteritems():
@@ -1531,8 +1545,9 @@
TODO: There should be an alternate display that allows single selects to be
displayed as radio buttons and multiple selects as a set of checkboxes
- TODO: The following must be fixed to test correctly for the new security_check tag in the DataToolParameter ( the last test below is broken )
- Nate's next pass at the dataset security stuff will dramatically alter this anyway.
+ TODO: The following must be fixed to test correctly for the new security_check tag in
+ the DataToolParameter ( the last test below is broken ) Nate's next pass at the dataset
+ security stuff will dramatically alter this anyway.
"""
def __init__( self, tool, elem, trans=None):
@@ -1579,8 +1594,8 @@
# Load conversions required for the dataset input
self.conversions = []
for conv_elem in elem.findall( "conversion" ):
- name = conv_elem.get( "name" ) #name for commandline substitution
- conv_extensions = conv_elem.get( "type" ) #target datatype extension
+ name = conv_elem.get( "name" ) # name for commandline substitution
+ conv_extensions = conv_elem.get( "type" ) # target datatype extension
# FIXME: conv_extensions should be able to be an ordered list
assert None not in [ name, type ], 'A name (%s) and type (%s) are required for explicit conversion' % ( name, type )
conv_types = tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() )
@@ -1592,14 +1607,15 @@
try:
filter_value = self.options.get_options( trans, other_values )[0][0]
except IndexError:
- pass #no valid options
+ pass # no valid options
assert trans is not None, "DataToolParameter requires a trans"
history = trans.get_history()
assert history is not None, "DataToolParameter requires a history"
if value is not None:
if type( value ) != list:
value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
+ field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
+
# CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field()
def dataset_collector( hdas, parent_hid ):
current_user_roles = trans.get_current_user_roles()
@@ -1654,7 +1670,7 @@
return field
def get_initial_value( self, trans, context, history=None ):
- return self.get_initial_value_from_history_prevent_repeats(trans, context, None, history=history);
+ return self.get_initial_value_from_history_prevent_repeats(trans, context, None, history=history)
def get_initial_value_from_history_prevent_repeats( self, trans, context, already_used, history=None ):
"""
@@ -1676,7 +1692,8 @@
try:
filter_value = self.options.get_options( trans, context )[0][0]
except IndexError:
- pass #no valid options
+ pass # no valid options
+
def dataset_collector( datasets ):
def is_convertable( dataset ):
target_ext, converted_dataset = dataset.find_conversion_destination( self.formats )
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -1070,10 +1070,11 @@
the user (chromInfo in history).
"""
dbnames = list()
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for dataset in datasets:
- dbnames.append( (dataset.dbkey, dataset.name) )
+ if self.history:
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
user = self.get_user()
if user and 'dbkeys' in user.preferences:
user_keys = from_json_string( user.preferences['dbkeys'] )
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -7,7 +7,7 @@
from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.web.framework.helpers import is_true
from galaxy.datatypes import dataproviders
-
+from galaxy.util import string_as_bool_or_none
import logging
log = logging.getLogger( __name__ )
@@ -245,11 +245,16 @@
@web.expose_api_raw_anonymous
def display( self, trans, history_content_id, history_id,
- preview=False, filename=None, to_ext=None, chunk=None, **kwd ):
+ preview=False, filename=None, to_ext=None, chunk=None, raw=False, **kwd ):
"""
GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/display
Displays history content (dataset).
+
+ The query parameter 'raw' should be considered experimental and may be dropped at
+ some point in the future without warning. Generally, data should be processed by its
+ datatype prior to display (the default if raw is unspecified or explicitly false).
"""
+ raw = string_as_bool_or_none( raw )
# Huge amount of code overlap with lib/galaxy/webapps/galaxy/api/history_content:show here.
rval = ''
try:
@@ -269,7 +274,15 @@
hda = self.get_history_dataset_association( trans, history, history_content_id,
check_ownership=True, check_accessible=True )
- rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **kwd )
+ display_kwd = kwd.copy()
+ try:
+ del display_kwd["key"]
+ except KeyError:
+ pass
+ if raw:
+ rval = open( hda.file_name )
+ else:
+ rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **display_kwd )
except Exception, exception:
log.error( "Error getting display data for dataset (%s) from history (%s): %s",
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -9,6 +9,7 @@
import logging
log = logging.getLogger( __name__ )
+
class ToolsController( BaseAPIController, UsesVisualizationMixin ):
"""
RESTful controller for interactions with tools.
@@ -113,7 +114,19 @@
# TODO: encode data ids and decode ids.
# TODO: handle dbkeys
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__, history=target_history )
+ # process_state will be 'populate' or 'update'. When no tool
+ # state is specified in input - it will be 'populate', and
+ # tool will fully expand repeat and conditionals when building
+ # up state. If tool state is found in input
+ parameters, process_state will be 'update' and complex
+ # submissions (with repeats and conditionals) must be built up
+ over several iterative calls to the API - mimicking behavior
+ # of web controller (though frankly API never returns
+ # tool_state so this "legacy" behavior is probably impossible
+ # through API currently).
+ incoming = params.__dict__
+ process_state = "update" if "tool_state" in incoming else "populate"
+ template, vars = tool.handle_input( trans, incoming, history=target_history, process_state=process_state, source="json" )
if 'errors' in vars:
trans.response.status = 400
return { "message": { "type": "error", "data" : vars[ 'errors' ] } }
diff -r ac744f96faa5d4a61c73ca2e20655f818af3dddd -r 3402a11b008865ee37ee29c9d7d3d5ea27c6fe84 templates/webapps/galaxy/tool_form.mako
--- a/templates/webapps/galaxy/tool_form.mako
+++ b/templates/webapps/galaxy/tool_form.mako
@@ -218,12 +218,11 @@
else:
cls = "form-row"
- label = param.get_label()
-
field = param.get_html_field( trans, parent_state[ param.name ], other_values )
field.refresh_on_change = param.refresh_on_change
- # Field may contain characters submitted by user and these characters may be unicode; handle non-ascii characters gracefully.
+ # Field may contain characters submitted by user and these characters may
+ # be unicode; handle non-ascii characters gracefully.
field_html = field.get_html( prefix )
if type( field_html ) is not unicode:
field_html = unicode( field_html, 'utf-8', 'replace' )
@@ -232,25 +231,39 @@
return field_html
%><div class="${cls}">
- %if label:
- <label for="${param.name}">${label}:</label>
- %endif
- <div class="form-row-input">${field_html}</div>
- %if parent_errors.has_key( param.name ):
- <div class="form-row-error-message">
- <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
- </div>
- %endif
+ ${label_for_param( param )}
+ ${input_for_param( param, field_html )}
+ ${errors_for_param( param, parent_errors )}
+ ${help_for_param( param )}
+ <div style="clear: both;"></div>
+ </div>
+</%def>
- %if param.help:
- <div class="toolParamHelp" style="clear: both;">
- ${param.help}
- </div>
- %endif
+<%def name="input_for_param( param, field_html )">
+ <div class="form-row-input">${field_html}</div>
+</%def>
- <div style="clear: both;"></div>
+<%def name="label_for_param( param )">
+ <% label = param.get_label()%>
+ %if label:
+ <label for="${param.name}">${label}:</label>
+ %endif
+</%def>
- </div>
+<%def name="errors_for_param( param, parent_errors )">
+ %if parent_errors.has_key( param.name ):
+ <div class="form-row-error-message">
+ <div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${parent_errors[param.name]}</span></div>
+ </div>
+ %endif
+</%def>
+
+<%def name="help_for_param( param )">
+ %if param.help:
+ <div class="toolParamHelp" style="clear: both;">
+ ${param.help}
+ </div>
+ %endif
</%def><%def name="row_for_rerun()">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e54372d60f11/
Changeset: e54372d60f11
Branch: stable
User: jgoecks
Date: 2013-11-14 17:00:19
Summary: Trackster: only allow datasets with proper dbkey to be added to visualization. This is a regression because summary data format [bigwig] requires a proper dbkey.
Affected #: 1 file
diff -r 88a08db3c73393ef4f64b0c2f50262d5aa03967b -r e54372d60f111f4be26868ba416649697b551a13 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -86,15 +86,19 @@
class DbKeyColumn( grids.GridColumn ):
""" Column for filtering by and displaying dataset dbkey. """
def filter( self, trans, user, query, dbkey ):
- """ Filter by dbkey; datasets without a dbkey are returned as well. """
- # use raw SQL b/c metadata is a BLOB
+ """ Filter by dbkey. """
+ # Use raw SQL b/c metadata is a BLOB.
dbkey_user, dbkey = decode_dbkey( dbkey )
dbkey = dbkey.replace("'", "\\'")
- return query.filter( or_( \
- or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
- or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
- )
- )
+ return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+
+ #Use this query when datasets with matching dbkey *or* no dbkey can be added to the visualization.
+ #return query.filter( or_( \
+ # or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+ # or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+ # )
+ # )
+
class HistoryColumn( grids.GridColumn ):
""" Column for filtering by history id. """
https://bitbucket.org/galaxy/galaxy-central/commits/516df888b97a/
Changeset: 516df888b97a
User: jgoecks
Date: 2013-11-14 17:00:58
Summary: Merge stable with default branch.
Affected #: 1 file
diff -r 7553213e0646fd4337ceacc78820990c6ce0c710 -r 516df888b97a164b7ff2d8235f5d4ea6462e185d lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -86,15 +86,19 @@
class DbKeyColumn( grids.GridColumn ):
""" Column for filtering by and displaying dataset dbkey. """
def filter( self, trans, user, query, dbkey ):
- """ Filter by dbkey; datasets without a dbkey are returned as well. """
- # use raw SQL b/c metadata is a BLOB
+ """ Filter by dbkey. """
+ # Use raw SQL b/c metadata is a BLOB.
dbkey_user, dbkey = decode_dbkey( dbkey )
dbkey = dbkey.replace("'", "\\'")
- return query.filter( or_( \
- or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
- or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
- )
- )
+ return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+
+ #Use this query when datasets with matching dbkey *or* no dbkey can be added to the visualization.
+ #return query.filter( or_( \
+ # or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+ # or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+ # )
+ # )
+
class HistoryColumn( grids.GridColumn ):
""" Column for filtering by history id. """
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/88a08db3c733/
Changeset: 88a08db3c733
Branch: stable
User: jgoecks
Date: 2013-11-14 00:41:23
Summary: Implement a hack to avoid infinite recursion in custom builds.
Affected #: 1 file
diff -r 9bde6f2b9c59a29a35b85b7fe3c2e3bef3ae6a2e -r 88a08db3c73393ef4f64b0c2f50262d5aa03967b lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -206,12 +206,19 @@
db_datasets[ "chromInfo" ] = db_dataset
incoming[ "chromInfo" ] = db_dataset.file_name
else:
- # For custom builds, chrom info resides in converted dataset; for built-in builds, chrom info resides in tool-data/shared.
+ # -- Get chrom_info from either a custom or built-in build. --
+
chrom_info = None
if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
# Custom build.
custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ input_dbkey ]
- if 'fasta' in custom_build_dict:
+ # HACK: the attempt to get chrom_info below will trigger the
+ fasta-to-len converter if the dataset is not available,
+ # which will in turn create a recursive loop when
+ # running the fasta-to-len tool. So, use a hack in the second
+ # condition below to avoid getting chrom_info when running the
+ # fasta-to-len converter.
+ if 'fasta' in custom_build_dict and tool.id != 'CONVERTER_fasta_to_len':
build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] )
chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name
https://bitbucket.org/galaxy/galaxy-central/commits/1c4bb73d630a/
Changeset: 1c4bb73d630a
User: jgoecks
Date: 2013-11-14 00:42:54
Summary: Merge stable branch to default.
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/39256e03655f/
Changeset: 39256e03655f
User: jgoecks
Date: 2013-11-14 00:41:23
Summary: Implement a hack to avoid infinite recursion in custom builds.
Affected #: 1 file
diff -r a9d90d2d13e121ffda929baa6ffab6ee43e8892f -r 39256e03655f35b03b940d2b91fc2436106f4e88 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -206,12 +206,19 @@
db_datasets[ "chromInfo" ] = db_dataset
incoming[ "chromInfo" ] = db_dataset.file_name
else:
- # For custom builds, chrom info resides in converted dataset; for built-in builds, chrom info resides in tool-data/shared.
+ # -- Get chrom_info from either a custom or built-in build. --
+
chrom_info = None
if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ):
# Custom build.
custom_build_dict = from_json_string( trans.user.preferences[ 'dbkeys' ] )[ input_dbkey ]
- if 'fasta' in custom_build_dict:
+ # HACK: the attempt to get chrom_info below will trigger the
+ fasta-to-len converter if the dataset is not available,
+ # which will in turn create a recursive loop when
+ # running the fasta-to-len tool. So, use a hack in the second
+ # condition below to avoid getting chrom_info when running the
+ # fasta-to-len converter.
+ if 'fasta' in custom_build_dict and tool.id != 'CONVERTER_fasta_to_len':
build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] )
chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a9d90d2d13e1/
Changeset: a9d90d2d13e1
User: jmchilton
Date: 2013-11-13 23:37:11
Summary: LWR: Expand user properties (__user_name__, __user_email__, __user_id__) in job params before creating client.
This was final piece of puzzle for running jobs as real user through LWR. Now one just needs to add <param id="submit_user">$__user_name__</param> to LWR destination in job_conf.xml on Galaxy side to pass this information to the LWR. More complicated on the LWR side obviously, need to configure DRMAA, set job manager to be 'queued_external_drmaa', and setup sudo rules. Will add some details to LWR documentation shortly.
Affected #: 1 file
diff -r 521b4ff9060b72f1f8c9f5977f8c4fa2b0bd7877 -r a9d90d2d13e121ffda929baa6ffab6ee43e8892f lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -108,7 +108,10 @@
job_id = job_wrapper.job_id
if hasattr(job_wrapper, 'task_id'):
job_id = "%s_%s" % (job_id, job_wrapper.task_id)
- return self.get_client( job_wrapper.job_destination.params, job_id )
+ params = job_wrapper.job_destination.params.copy()
+ for key, value in params.iteritems():
+ params[key] = model.User.expand_user_properties( job_wrapper.get_job().user, value )
+ return self.get_client( params, job_id )
def get_client_from_state(self, job_state):
job_destination_params = job_state.job_destination.params
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c70a6f517aa9/
Changeset: c70a6f517aa9
User: Dave Bouvier
Date: 2013-11-13 20:46:25
Summary: When running the install and test framework, explicitly uninstall any tool dependency that is not in the state Installed.
Affected #: 2 files
diff -r e6b3faf87f026f03412ff7b1b133fa0872c8be45 -r c70a6f517aa96506b3245db857025f567aac0511 test/install_and_test_tool_shed_repositories/base/test_db_util.py
--- a/test/install_and_test_tool_shed_repositories/base/test_db_util.py
+++ b/test/install_and_test_tool_shed_repositories/base/test_db_util.py
@@ -36,12 +36,17 @@
return role
raise AssertionError( "Private role not found for user '%s'" % user.email )
-def get_tool_dependencies_for_installed_repository( repository_id, status=None ):
+def get_tool_dependencies_for_installed_repository( repository_id, status=None, exclude_status=None ):
if status is not None:
return sa_session.query( model.ToolDependency ) \
.filter( and_( model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
model.ToolDependency.table.c.status == status ) ) \
.all()
+ elif exclude_status is not None:
+ return sa_session.query( model.ToolDependency ) \
+ .filter( and_( model.ToolDependency.table.c.tool_shed_repository_id == repository_id,
+ model.ToolDependency.table.c.status != exclude_status ) ) \
+ .all()
else:
return sa_session.query( model.ToolDependency ) \
.filter( model.ToolDependency.table.c.tool_shed_repository_id == repository_id ) \
diff -r e6b3faf87f026f03412ff7b1b133fa0872c8be45 -r c70a6f517aa96506b3245db857025f567aac0511 test/install_and_test_tool_shed_repositories/base/twilltestcase.py
--- a/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
+++ b/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
@@ -150,10 +150,10 @@
else:
strings_displayed.append( 'has been uninstalled' )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
- # Get all tool dependencies that are in an error state and uninstall them explicitly, so that the next installation attempt
+ # Get all tool dependencies that are not in an installed state and uninstall them explicitly, so that the next installation attempt
# may succeed.
- error_state = model.ToolDependency.installation_status.ERROR
- tool_dependencies = test_db_util.get_tool_dependencies_for_installed_repository( installed_repository.id, status=error_state )
+ installed_state = model.ToolDependency.installation_status.INSTALLED
+ tool_dependencies = test_db_util.get_tool_dependencies_for_installed_repository( installed_repository.id, exclude_status=installed_state )
if len( tool_dependencies ) > 0:
encoded_tool_dependency_ids = [ self.security.encode_id( tool_dependency.id ) for tool_dependency in tool_dependencies ]
self.uninstall_tool_dependencies( self.security.encode_id( installed_repository.id ), encoded_tool_dependency_ids )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.