1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f2abe519d05e/
changeset: f2abe519d05e
user: clements
date: 2012-11-01 06:50:03
summary: Change docstring so it no longer generates a warning in Sphinx.
affected #: 1 file
diff -r 563d279f69bcec68c76a5a3c7c4144b34b614011 -r f2abe519d05e6f4e01fcf9983ab3fb72f5009272 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -340,7 +340,7 @@
def get_database_engine_options( kwargs ):
"""
Allow options for the SQLAlchemy database engine to be passed by using
- the prefix "database_engine_option_".
+ the prefix "database_engine_option".
"""
conversions = {
'convert_unicode': string_as_bool,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/67da1098be88/
changeset: 67da1098be88
user: dan
date: 2012-11-06 21:46:57
summary: Add a helper method "params_to_incoming" that takes a set of parameters and populates a dictionary as if it were an incoming html post.
affected #: 1 file
diff -r aba101adc4a7012f6c959c23ea5f1b8701667649 -r 67da1098be8881f223f469c53ef7dc3528409343 lib/galaxy/tools/parameters/__init__.py
--- a/lib/galaxy/tools/parameters/__init__.py
+++ b/lib/galaxy/tools/parameters/__init__.py
@@ -94,3 +94,26 @@
value = params[key].value_from_basic( value, app, ignore_errors )
rval[ key ] = value
return rval
+
+def params_to_incoming( incoming, inputs, input_values, app, name_prefix="" ):
+ """
+ Given a tool's parameter definition (`inputs`) and a specific set of
+ parameter `input_values` objects, populate `incoming` with the html values.
+
+ Useful for e.g. the rerun function.
+ """
+ for input in inputs.itervalues():
+ if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
+ for i, d in enumerate( input_values[ input.name ] ):
+ index = d['__index__']
+ new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index )
+ params_to_incoming( incoming, input.inputs, d, app, new_name_prefix )
+ elif isinstance( input, Conditional ):
+ values = input_values[ input.name ]
+ current = values["__current_case__"]
+ new_name_prefix = name_prefix + input.name + "|"
+ incoming[ new_name_prefix + input.test_param.name ] = values[ input.test_param.name ]
+ params_to_incoming( incoming, input.cases[current].inputs, values, app, new_name_prefix )
+ else:
+ incoming[ name_prefix + input.name ] = input.to_string( input_values.get( input.name ), app )
+
https://bitbucket.org/galaxy/galaxy-central/changeset/50513229f6ef/
changeset: 50513229f6ef
user: dan
date: 2012-11-06 21:46:57
summary: Allow passing old_errors to handle_input. Fixes for handling old_errors in handle_input for grouping parameters.
affected #: 1 file
diff -r 67da1098be8881f223f469c53ef7dc3528409343 -r 50513229f6ef8338361d838a40c956108836465d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1731,7 +1731,7 @@
callback( "", input, value[input.name] )
else:
input.visit_inputs( "", value[input.name], callback )
- def handle_input( self, trans, incoming, history=None ):
+ def handle_input( self, trans, incoming, history=None, old_errors=None ):
"""
Process incoming parameters for this tool from the dict `incoming`,
update the tool state (or create if none existed), and either return
@@ -1766,7 +1766,7 @@
else:
# Update state for all inputs on the current page taking new
# values from `incoming`.
- errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming )
+ errors = self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {} )
# If the tool provides a `validate_input` hook, call it.
validate_input = self.get_hook( 'validate_input' )
if validate_input:
@@ -1895,7 +1895,10 @@
any_group_errors = True
# Only need to find one that can't be removed due to size, since only
# one removal is processed at # a time anyway
- break
+ break
+ elif group_old_errors and group_old_errors[i]:
+ group_errors[i] = group_old_errors[i]
+ any_group_errors = True
# Update state
max_index = -1
for i, rep_state in enumerate( group_state ):
@@ -1978,6 +1981,8 @@
update_only=update_only,
old_errors=group_old_errors,
item_callback=item_callback )
+ if input.test_param.name in group_old_errors and not test_param_error:
+ test_param_error = group_old_errors[ input.test_param.name ]
if test_param_error:
group_errors[ input.test_param.name ] = test_param_error
if group_errors:
https://bitbucket.org/galaxy/galaxy-central/changeset/81a007dbc152/
changeset: 81a007dbc152
user: dan
date: 2012-11-06 21:46:57
summary: Rework rerun functionality to treat the previously set job parameters as though they are an incoming form post. This allows validation and subsequent display of errors between the original and current states.
affected #: 1 file
diff -r 50513229f6ef8338361d838a40c956108836465d -r 81a007dbc1528a6124dfdc9caf05579887fe0d4c lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -6,6 +6,7 @@
from galaxy.util.bunch import Bunch
from galaxy.tools import DefaultToolState
from galaxy.tools.parameters.basic import UnvalidatedValue
+from galaxy.tools.parameters import params_to_incoming
from galaxy.tools.actions import upload_common
import logging
@@ -192,25 +193,29 @@
if isinstance(value,list):
values = []
for val in value:
- if val not in history.datasets and val in hda_source_dict:
+ if val in history.datasets:
+ values.append( val )
+ elif val in hda_source_dict:
values.append( hda_source_dict[ val ])
return values
if value not in history.datasets and value in hda_source_dict:
return hda_source_dict[ value ]
visit_input_values( tool.inputs, params_objects, rerun_callback )
- # Create a fake tool_state for the tool, with the parameters values
+ # Create a fake tool_state for the tool, with the parameters values
state = tool.new_state( trans )
state.inputs = params_objects
- tool_state_string = util.object_to_string(state.encode(tool, trans.app))
- # Setup context for template
- vars = dict( tool_state=state, errors = upgrade_messages )
+ #create an incoming object from the original job's dataset-modified param objects
+ incoming = {}
+ params_to_incoming( incoming, tool.inputs, params_objects, trans.app )
+ incoming[ "tool_state" ] = util.object_to_string( state.encode( tool, trans.app ) )
+ template, vars = tool.handle_input( trans, incoming, old_errors=upgrade_messages ) #update new state with old parameters
# Is the "add frame" stuff neccesary here?
add_frame = AddFrameData()
add_frame.debug = trans.debug
if from_noframe is not None:
add_frame.wiki_url = trans.app.config.wiki_url
add_frame.from_noframe = True
- return trans.fill_template( "tool_form.mako",
+ return trans.fill_template( template,
history=history,
toolbox=self.get_toolbox(),
tool_version_select_field=tool_version_select_field,
https://bitbucket.org/galaxy/galaxy-central/changeset/907f364107c5/
changeset: 907f364107c5
user: dan
date: 2012-11-06 21:46:58
summary: Add error messages for a DataToolParameter when the provided value is no longer valid due to being deleted or being in an error state.
affected #: 1 file
diff -r 81a007dbc1528a6124dfdc9caf05579887fe0d4c -r 907f364107c534cd531b4d91fcda7fe3e59eb4b1 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1539,25 +1539,38 @@
if trans.workflow_building_mode:
return None
if not value:
- raise ValueError( "History does not include a dataset of the required format / build" )
+ raise ValueError( "History does not include a dataset of the required format / build" )
if value in [None, "None"]:
return None
if isinstance( value, list ):
- return [ trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( v ) for v in value ]
+ rval = [ trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( v ) for v in value ]
elif isinstance( value, trans.app.model.HistoryDatasetAssociation ):
- return value
+ rval = value
else:
- return trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( value )
+ rval = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( value )
+ if isinstance( rval, list ):
+ values = rval
+ else:
+ values = [ rval ]
+ for v in values:
+ if v:
+ if v.deleted:
+ raise ValueError( "The previously selected dataset has been previously deleted" )
+ if v.dataset.state in [galaxy.model.Dataset.states.ERROR, galaxy.model.Dataset.states.DISCARDED ]:
+ raise ValueError( "The previously selected dataset has entered an unusable state" )
+ return rval
def to_string( self, value, app ):
- if value is None or isinstance( value, str ):
+ if value is None or isinstance( value, basestring ):
return value
+ elif isinstance( value, int ):
+ return str( value )
elif isinstance( value, DummyDataset ):
return None
elif isinstance( value, list) and len(value) > 0 and isinstance( value[0], DummyDataset):
return None
elif isinstance( value, list ):
- return ",".join( [ val if isinstance( val, str ) else str(val.id) for val in value] )
+ return ",".join( [ val if isinstance( val, basestring ) else str(val.id) for val in value] )
return value.id
def to_python( self, value, app ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/aba101adc4a7/
changeset: aba101adc4a7
user: smcmanus
date: 2012-11-06 21:18:42
summary: The job's external metadata is returned from the model. The Task class had included a get_external_output_metadata method that was missing from the Job class, and this was the result of trying to merge the two interfaces. This caused problems in cancelling jobs (i.e., the jobs would run to completion) when the jobs were scheduled for the local runner.
affected #: 3 files
diff -r 8fb2a905f2492a388799ac4d861e52bda9365300 -r aba101adc4a7012f6c959c23ea5f1b8701667649 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -400,7 +400,6 @@
# the state or whether the tool used exit codes and regular
# expressions to do so. So we use
# job.state == job.states.ERROR to replace this same test.
- #elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and not context['stderr']:
elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != job.state:
dataset._state = model.Dataset.states.FAILED_METADATA
else:
diff -r 8fb2a905f2492a388799ac4d861e52bda9365300 -r aba101adc4a7012f6c959c23ea5f1b8701667649 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -183,8 +183,9 @@
def stop_job( self, job ):
#if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
- if job.get_external_output_metadata():
- pid = job.get_external_output_metadata()[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
+ job_ext_output_metadata = job.get_external_output_metadata()
+ if job_ext_output_metadata:
+ pid = job_ext_output_metadata[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
else:
pid = job.get_job_runner_external_id()
if pid in [ None, '' ]:
diff -r 8fb2a905f2492a388799ac4d861e52bda9365300 -r aba101adc4a7012f6c959c23ea5f1b8701667649 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -18,7 +18,7 @@
from galaxy.model.item_attrs import UsesAnnotations, APIItem
from sqlalchemy.orm import object_session
from sqlalchemy.sql.expression import func
-import sys, os.path, os, errno, codecs, operator, socket, pexpect, logging, time, shutil
+import os.path, os, errno, codecs, operator, socket, pexpect, logging, time, shutil
if sys.version_info[:2] < ( 2, 5 ):
from sets import Set as set
@@ -138,6 +138,12 @@
# TODO: Add accessors for members defined in SQL Alchemy for the Job table and
# for the mapper defined to the Job table.
+ def get_external_output_metadata( self ):
+ """
+ The external_output_metadata is currently a reference from Job to
+ JobExternalOutputMetadata. It exists for a job but not a task.
+ """
+ return self.external_output_metadata
def get_session_id( self ):
return self.session_id
def get_user_id( self ):
@@ -370,6 +376,13 @@
# (e.g., for a session) or never use the member (e.g., external output
# metdata). These can be filled in as needed.
def get_external_output_metadata( self ):
+ """
+ The external_output_metadata is currently a backref to
+ JobExternalOutputMetadata. It exists for a job but not a task,
+ and when a task is cancelled its corresponding parent Job will
+ be cancelled. So None is returned now, but that could be changed
+ to self.get_job().get_external_output_metadata().
+ """
return None
def get_job_runner_name( self ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8fb2a905f249/
changeset: 8fb2a905f249
user: greg
date: 2012-11-06 21:09:34
summary: Per James Johnson, allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR when defining tool dependencies to be installed along with tool shed repositories.
affected #: 1 file
diff -r 3d27b35e1c1fa629ec4175f5e613b5a771152377 -r 8fb2a905f2492a388799ac4d861e52bda9365300 lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -19,6 +19,14 @@
else:
env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir )
return dict( name=env_var_name, action=env_var_action, value=env_var_text )
+ if elem.text:
+ # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR since there may be command line
+ # parameters that are tuned for a Galaxy instance. Allowing them to be set in one location rather than being hard coded into
+ # each tool config is the best approach. For example:
+ # <environment_variable name="GATK2_SITE_OPTIONS" action="set_to">
+ # "--num_threads 4 --num_cpu_threads_per_data_thread 3 --phone_home STANDARD"
+ # </environment_variable>
+ return dict( name=env_var_name, action=env_var_action, value=elem.text)
return None
def create_or_update_env_shell_file( install_dir, env_var_dict ):
env_var_name = env_var_dict[ 'name' ]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5dcbbdfe1087/
changeset: 5dcbbdfe1087
user: dan
date: 2012-11-06 18:57:23
summary: Allow rerun to access hidden datasets.
affected #: 1 file
diff -r 6624cd467f30618d5a1319e14b2a41ab7c6a2407 -r 5dcbbdfe1087e8d75f1df939d363b347e2764dd5 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1434,7 +1434,7 @@
else:
hid = str( hda.hid )
if not hda.dataset.state in [galaxy.model.Dataset.states.ERROR, galaxy.model.Dataset.states.DISCARDED] and \
- hda.visible and \
+ ( hda.visible or ( value and hda in value and not hda.implicitly_converted_parent_datasets ) ) and \
trans.app.security_agent.can_access_dataset( current_user_roles, hda.dataset ):
# If we are sending data to an external application, then we need to make sure there are no roles
# associated with the dataset that restrict it's access from "public".
@@ -1444,7 +1444,11 @@
continue
if isinstance( hda.datatype, self.formats):
selected = ( value and ( hda in value ) )
- field.add_option( "%s: %s" % ( hid, hda_name ), hda.id, selected )
+ if hda.visible:
+ hidden_text = ""
+ else:
+ hidden_text = " (hidden)"
+ field.add_option( "%s:%s %s" % ( hid, hidden_text, hda_name ), hda.id, selected )
else:
target_ext, converted_dataset = hda.find_conversion_destination( self.formats )
if target_ext:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6624cd467f30/
changeset: 6624cd467f30
user: richard_burhans
date: 2012-11-06 17:22:56
summary: Fixed installation of toolsheds containing proprietary datatypes
affected #: 1 file
diff -r efccb227d72ccb2764544fd461195789653b4509 -r 6624cd467f30618d5a1319e14b2a41ab7c6a2407 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -745,10 +745,11 @@
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
+ shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
- shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf ),
+ shed_config_dict=shed_config_dict,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -791,9 +792,12 @@
tool_shed_repository.includes_datatypes = True
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
+ files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
# Load data types required by tools.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/efccb227d72c/
changeset: efccb227d72c
user: jgoecks
date: 2012-11-06 15:52:23
summary: Circster improvements: (a) fetch a smaller amount of data on load and (b) use large quantile rather than max for top data range to limit impact of exceptionally large values.
affected #: 2 files
diff -r dd4786c5b4e48db780f6ec5da42dfafec19538f4 -r efccb227d72ccb2764544fd461195789653b4509 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -636,9 +636,12 @@
data_provider = trans.app.data_provider_registry.get_data_provider( trans,
original_dataset=dataset,
source=source )
- # HACK: pass in additional params which are used for only some types of data providers;
- # level, cutoffs used for summary tree, and interchromosomal used for chromatin interactions.
- rval = data_provider.get_genome_data( chroms_info, level=4, detail_cutoff=0, draw_cutoff=0,
+ # HACK: pass in additional params which are used for only some
+ # types of data providers; level, cutoffs used for summary tree,
+ # num_samples for BBI, and interchromosomal used for chromatin interactions.
+ rval = data_provider.get_genome_data( chroms_info,
+ level=4, detail_cutoff=0, draw_cutoff=0,
+ num_samples=150,
interchromosomal=True )
return rval
diff -r dd4786c5b4e48db780f6ec5da42dfafec19538f4 -r efccb227d72ccb2764544fd461195789653b4509 static/scripts/viz/circster.js
--- a/static/scripts/viz/circster.js
+++ b/static/scripts/viz/circster.js
@@ -720,6 +720,14 @@
var CircsterQuantitativeTrackView = CircsterTrackView.extend({
/**
+ * Returns quantile for an array of numbers.
+ */
+ _quantile: function(numbers, quantile) {
+ numbers.sort(d3.ascending);
+ return d3.quantile(numbers, quantile);
+ },
+
+ /**
* Renders quantitative data with the form [x, value] and assumes data is equally spaced across
* chromosome. Attachs a dict with track and chrom name information to DOM element.
*/
@@ -750,7 +758,8 @@
// Radius scaler.
var radius = d3.scale.linear()
.domain(this.data_bounds)
- .range(this.radius_bounds);
+ .range(this.radius_bounds)
+ .clamp(true);
// Scaler for placing data points across arc.
var angle = d3.scale.linear()
@@ -854,7 +863,7 @@
if (typeof d === 'string' || !d.max) { return 0; }
return d.max;
});
- return [ 0, (max_data && typeof max_data !== 'string' ? _.max(max_data) : 0) ];
+ return [ 0, (max_data && typeof max_data !== 'string' ? this._quantile(values, 0.98) : 0) ];
}
});
@@ -865,7 +874,7 @@
get_data_bounds: function(data) {
// Set max across dataset by extracting all values, flattening them into a
- // single array, and getting the min and max.
+ // single array, and getting third quartile.
var values = _.flatten( _.map(data, function(d) {
if (d) {
// Each data point has the form [position, value], so return all values.
@@ -878,7 +887,7 @@
}
}) );
- return [ _.min(values), _.max(values) ];
+ return [ _.min(values), this._quantile(values, 0.98) ];
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.