galaxy-commits
February 2012
- 2 participants
- 113 discussions
commit/galaxy-central: jgoecks: Better error message when bias correction/sequence data cannot be used in Cufflinks/compare/merge/diff.
by Bitbucket 14 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/71031bf3105c/
changeset: 71031bf3105c
user: jgoecks
date: 2012-02-14 18:27:47
summary: Better error message when bias correction/sequence data cannot be used in Cufflinks/compare/merge/diff.
affected #: 5 files
diff -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 -r 71031bf3105c6557256bf2957ef5565ad672355d tools/ngs_rna/cuffcompare_wrapper.py
--- a/tools/ngs_rna/cuffcompare_wrapper.py
+++ b/tools/ngs_rna/cuffcompare_wrapper.py
@@ -58,16 +58,21 @@
# Set/link to sequence file.
if options.use_seq_data:
- cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
- if not os.path.exists( cached_seqs_pointer_file ):
- stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
- # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
- # and the equCab2.fa file will contain fasta sequences.
- seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
if options.ref_file != 'None':
+ # Sequence data from history.
# Create symbolic link to ref_file so that index will be created in working directory.
seq_path = "ref.fa"
os.symlink( options.ref_file, seq_path )
+ else:
+ # Sequence data from loc file.
+ cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
+ if not os.path.exists( cached_seqs_pointer_file ):
+ stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
+ # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
+ # and the equCab2.fa file will contain fasta sequences.
+ seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
+ if seq_path == '':
+ stop_err( 'No sequence data found for dbkey %s, so sequence data cannot be used.' % options.dbkey )
# Build command.
diff -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 -r 71031bf3105c6557256bf2957ef5565ad672355d tools/ngs_rna/cuffdiff_wrapper.py
--- a/tools/ngs_rna/cuffdiff_wrapper.py
+++ b/tools/ngs_rna/cuffdiff_wrapper.py
@@ -121,16 +121,21 @@
# If doing bias correction, set/link to sequence file.
if options.do_bias_correction:
- cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
- if not os.path.exists( cached_seqs_pointer_file ):
- stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
- # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
- # and the equCab2.fa file will contain fasta sequences.
- seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
if options.ref_file != 'None':
+ # Sequence data from history.
# Create symbolic link to ref_file so that index will be created in working directory.
seq_path = os.path.join( tmp_output_dir, "ref.fa" )
os.symlink( options.ref_file, seq_path )
+ else:
+ # Sequence data from loc file.
+ cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
+ if not os.path.exists( cached_seqs_pointer_file ):
+ stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
+ # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
+ # and the equCab2.fa file will contain fasta sequences.
+ seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
+ if seq_path == '':
+ stop_err( 'No sequence data found for dbkey %s, so bias correction cannot be used.' % options.dbkey )
# Build command.
diff -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 -r 71031bf3105c6557256bf2957ef5565ad672355d tools/ngs_rna/cuffdiff_wrapper.xml
--- a/tools/ngs_rna/cuffdiff_wrapper.xml
+++ b/tools/ngs_rna/cuffdiff_wrapper.xml
@@ -96,8 +96,8 @@
</param><conditional name="bias_correction"><param name="do_bias_correction" type="select" label="Perform Bias Correction" help="Bias detection and correction can significantly improve accuracy of transcript abundance estimates.">
+ <option value="No">No</option><option value="Yes">Yes</option>
- <option value="No">No</option></param><when value="Yes"><conditional name="seq_source">
diff -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 -r 71031bf3105c6557256bf2957ef5565ad672355d tools/ngs_rna/cufflinks_wrapper.py
--- a/tools/ngs_rna/cufflinks_wrapper.py
+++ b/tools/ngs_rna/cufflinks_wrapper.py
@@ -79,16 +79,21 @@
# If doing bias correction, set/link to sequence file.
if options.do_bias_correction:
- cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
- if not os.path.exists( cached_seqs_pointer_file ):
- stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
- # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
- # and the equCab2.fa file will contain fasta sequences.
- seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
if options.ref_file != 'None':
+ # Sequence data from history.
# Create symbolic link to ref_file so that index will be created in working directory.
seq_path = "ref.fa"
- os.symlink( options.ref_file, seq_path )
+ os.symlink( options.ref_file, seq_path )
+ else:
+ # Sequence data from loc file.
+ cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
+ if not os.path.exists( cached_seqs_pointer_file ):
+ stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
+ # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
+ # and the equCab2.fa file will contain fasta sequences.
+ seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
+ if seq_path == '':
+ stop_err( 'No sequence data found for dbkey %s, so bias correction cannot be used.' % options.dbkey )
# Build command.
diff -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 -r 71031bf3105c6557256bf2957ef5565ad672355d tools/ngs_rna/cuffmerge_wrapper.py
--- a/tools/ngs_rna/cuffmerge_wrapper.py
+++ b/tools/ngs_rna/cuffmerge_wrapper.py
@@ -59,16 +59,21 @@
# Set/link to sequence file.
if options.use_seq_data:
- cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
- if not os.path.exists( cached_seqs_pointer_file ):
- stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
- # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
- # and the equCab2.fa file will contain fasta sequences.
- seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
if options.ref_file != 'None':
+ # Sequence data from history.
# Create symbolic link to ref_file so that index will be created in working directory.
seq_path = "ref.fa"
os.symlink( options.ref_file, seq_path )
+ else:
+ # Sequence data from loc file.
+ cached_seqs_pointer_file = os.path.join( options.index_dir, 'sam_fa_indices.loc' )
+ if not os.path.exists( cached_seqs_pointer_file ):
+ stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
+ # If found for the dbkey, seq_path will look something like /galaxy/data/equCab2/sam_index/equCab2.fa,
+ # and the equCab2.fa file will contain fasta sequences.
+ seq_path = check_seq_file( options.dbkey, cached_seqs_pointer_file )
+ if seq_path == '':
+ stop_err( 'No sequence data found for dbkey %s, so sequence data cannot be used.' % options.dbkey )
# Build command.
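All four wrappers now resolve the sequence file the same way: a reference FASTA supplied from the history wins, otherwise the sam_fa_indices.loc lookup is tried, and a missing entry fails with an explicit message instead of a cryptic downstream error. A minimal standalone sketch of that pattern, kept close to the wrapper code above (check_seq_file is assumed to return an empty string when the dbkey has no .loc entry):

import os
import sys

def stop_err( msg ):
    # Mirror the wrappers' stop_err helper: print the error and exit non-zero.
    sys.stderr.write( msg + '\n' )
    sys.exit( 1 )

def resolve_seq_path( ref_file, index_dir, dbkey, check_seq_file ):
    """Return a FASTA path: history dataset first, then the .loc file lookup."""
    if ref_file != 'None':
        # Sequence data from history: symlink so the index is built in the working directory.
        seq_path = 'ref.fa'
        os.symlink( ref_file, seq_path )
        return seq_path
    # Sequence data from loc file.
    cached_seqs_pointer_file = os.path.join( index_dir, 'sam_fa_indices.loc' )
    if not os.path.exists( cached_seqs_pointer_file ):
        stop_err( 'The required file (%s) does not exist.' % cached_seqs_pointer_file )
    seq_path = check_seq_file( dbkey, cached_seqs_pointer_file )
    if seq_path == '':
        stop_err( 'No sequence data found for dbkey %s, so sequence data cannot be used.' % dbkey )
    return seq_path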
commit/galaxy-central: greg: Encode application name and link name when sending as request params to display application.
by Bitbucket 14 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/33c780b4c145/
changeset: 33c780b4c145
user: greg
date: 2012-02-14 17:58:51
summary: Encode application name and link name when sending as request params to display application.
affected #: 4 files
diff -r 95876f2552d354bf88320521dcf5083f31147bf0 -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -1,4 +1,5 @@
#Contains objects for using external display applications
+import logging, urllib
from galaxy.util import parse_xml, string_as_bool
from galaxy.util.odict import odict
from galaxy.util.template import fill_template
@@ -8,6 +9,8 @@
from util import encode_dataset_user
from copy import deepcopy
+log = logging.getLogger( __name__ )
+
#Any basic functions that we want to provide as a basic part of parameter dict should be added to this dict
BASE_PARAMS = { 'qp': quote_plus, 'url_for':url_for } #url_for has route memory...
@@ -37,7 +40,13 @@
self.name = None
def get_display_url( self, data, trans ):
dataset_hash, user_hash = encode_dataset_user( trans, data, None )
- return url_for( controller = '/dataset', action = "display_application", dataset_id = dataset_hash, user_id = user_hash, app_name = self.display_application.id, link_name = self.id, app_action = None )
+ return url_for( controller='/dataset',
+ action="display_application",
+ dataset_id=dataset_hash,
+ user_id=user_hash,
+ app_name=urllib.quote_plus( self.display_application.id ),
+ link_name=urllib.quote_plus( self.id ),
+ app_action=None )
def get_inital_values( self, data, trans ):
if self.other_values:
rval = odict( self.other_values )
diff -r 95876f2552d354bf88320521dcf5083f31147bf0 -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -1,4 +1,5 @@
#Contains parameters that are used in Display Applications
+import logging, urllib
from galaxy.util import string_as_bool
from galaxy.util.bunch import Bunch
from galaxy.util.template import fill_template
@@ -161,7 +162,15 @@
base_url = self.trans.request.base
if self.parameter.strip_https and base_url[ : 5].lower() == 'https':
base_url = "http%s" % base_url[ 5: ]
- return "%s%s" % ( base_url, url_for( controller = '/dataset', action = "display_application", dataset_id = self._dataset_hash, user_id = self._user_hash, app_name = self.parameter.link.display_application.id, link_name = self.parameter.link.id, app_action = self.action_name, action_param = self._url ) )
+ return "%s%s" % ( base_url,
+ url_for( controller='/dataset',
+ action="display_application",
+ dataset_id=self._dataset_hash,
+ user_id=self._user_hash,
+ app_name=urllib.quote_plus( self.parameter.link.display_application.id ),
+ link_name=urllib.quote_plus( self.parameter.link.id ),
+ app_action=self.action_name,
+ action_param=self._url ) )
@property
def action_name( self ):
return self.ACTION_NAME
diff -r 95876f2552d354bf88320521dcf5083f31147bf0 -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -514,7 +514,7 @@
display_app.repository_owner = installed_repository_dict[ 'repository_owner' ]
display_app.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
display_app.old_id = display_app.id
- # The converter should be included in the list of tools defined in tool_dicts.
+ # The display application should be included in the list of tools defined in tool_dicts.
tool_dicts = installed_repository_dict[ 'tool_dicts' ]
for tool_dict in tool_dicts:
if tool_dict[ 'id' ] == display_app.id:
diff -r 95876f2552d354bf88320521dcf5083f31147bf0 -r 33c780b4c1455628ec135b0ba98e285efcb89ed8 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -815,9 +815,11 @@
user_roles = user.all_roles()
else:
user_roles = []
+ # Decode application name and link name
+ app_name = urllib.unquote_plus( app_name )
+ link_name = urllib.unquote_plus( link_name )
if None in [ app_name, link_name ]:
return trans.show_error_message( "A display application name and link name must be provided." )
-
if trans.app.security_agent.can_access_dataset( user_roles, data.dataset ):
msg = []
refresh = False
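The fix is symmetric: the URL-building side percent-encodes the display application id and link id with urllib.quote_plus, and the dataset controller decodes them with urllib.unquote_plus before the lookup, so ids containing spaces or slashes survive the round trip. A small sketch of that round trip (Python 2 urllib, matching the diff; the example ids are made up):

import urllib

app_name = 'ucsc main'        # hypothetical display application id containing a space
link_name = 'main/interval'   # hypothetical link id containing a slash

# Sending side: encode the ids before putting them into request params.
encoded_app = urllib.quote_plus( app_name )    # 'ucsc+main'
encoded_link = urllib.quote_plus( link_name )  # 'main%2Finterval'

# Receiving side: decode before using the names to look up the display application.
assert urllib.unquote_plus( encoded_app ) == app_name
assert urllib.unquote_plus( encoded_link ) == link_name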
commit/galaxy-central: jgoecks: Trackster: for flexibility, use dictionaries in all object constructors. Fixed bugs associated with saving/restoring visualizations.
by Bitbucket 13 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/95876f2552d3/
changeset: 95876f2552d3
user: jgoecks
date: 2012-02-13 17:08:50
summary: Trackster: for flexibility, use dictionaries in all object constructors. Fixed bugs associated with saving/restoring visualizations.
affected #: 4 files
diff -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df -r 95876f2552d354bf88320521dcf5083f31147bf0 lib/galaxy/visualization/tracks/visual_analytics.py
--- a/lib/galaxy/visualization/tracks/visual_analytics.py
+++ b/lib/galaxy/visualization/tracks/visual_analytics.py
@@ -43,16 +43,16 @@
# TODO: could use this assertion to provide more information.
# assert job is not None, 'Requested job has not been loaded.'
if not job:
- return {}
+ return None
tool = trans.app.toolbox.get_tool( job.tool_id )
# TODO: could use this assertion to provide more information.
# assert tool is not None, 'Requested tool has not been loaded.'
if not tool:
- return {}
+ return None
# Tool must have a Trackster configuration.
if not tool.trackster_conf:
- return {}
+ return None
# Get list of tool parameters that can be interactively modified.
tool_params = []
@@ -75,7 +75,6 @@
'html' : urllib.quote( input.get_html() ) } )
# If tool has parameters that can be interactively modified, return tool.
- tool_def = {}
if len( tool_params ) != 0:
- tool_def = { 'name' : tool.name, 'params' : tool_params }
- return tool_def
\ No newline at end of file
+ return { 'name' : tool.name, 'params' : tool_params }
+ return None
\ No newline at end of file
diff -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df -r 95876f2552d354bf88320521dcf5083f31147bf0 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -679,10 +679,10 @@
*
* They optionally have a drag handle class.
*/
-var Drawable = function(name, view, container, prefs, drag_handle_class) {
+var Drawable = function(view, container, obj_dict) {
if (!Drawable.id_counter) { Drawable.id_counter = 0; }
this.id = Drawable.id_counter++;
- this.name = name;
+ this.name = ('name' in obj_dict ? obj_dict.name : null);
this.view = view;
this.container = container;
this.config = new DrawableConfig({
@@ -690,13 +690,13 @@
params: [
{ key: 'name', label: 'Name', type: 'text', default_value: name }
],
- saved_values: prefs,
+ saved_values: obj_dict.prefs,
onchange: function() {
this.track.set_name(this.track.config.values.name);
}
});
this.prefs = this.config.values;
- this.drag_handle_class = drag_handle_class;
+ this.drag_handle_class = obj_dict.drag_handle_class;
this.is_overview = false;
this.action_icons = {};
@@ -808,10 +808,6 @@
* Use from_dict to recreate object.
*/
to_dict: function() {},
- /**
- * Restore object from a dictionary created by to_dict()
- */
- from_dict: function(object_dict) {},
update_icons: function() {},
/**
* Set drawable name.
@@ -889,15 +885,27 @@
/**
* A collection of drawable objects.
*/
-var DrawableCollection = function(obj_type, name, view, container, prefs, drag_handle_class) {
- Drawable.call(this, name, view, container, prefs, drag_handle_class);
+var DrawableCollection = function(view, container, obj_dict) {
+ Drawable.call(this, view, container, obj_dict);
// Attribute init.
- this.obj_type = obj_type;
+ this.obj_type = obj_dict.obj_type;
this.drawables = [];
};
extend(DrawableCollection.prototype, Drawable.prototype, {
/**
+ * Unpack and add drawables to the collection.
+ */
+ unpack_drawables: function(drawables_array) {
+ // Add drawables to collection.
+ this.drawables = [];
+ var drawable;
+ for (var i = 0; i < drawables_array.length; i++) {
+ drawable = object_from_template(drawables_array[i], this);
+ this.add_drawable(drawable);
+ }
+ },
+ /**
* Init each drawable in the collection.
*/
init: function() {
@@ -930,28 +938,6 @@
};
},
/**
- * Restore object from a dictionary created by to_dict()
- */
- from_dict: function(collection_dict, container) {
- var collection = new this.constructor( collection_dict.name, view,
- container, collection_dict.prefs,
- view.viewport_container, view);
- var drawable_dict,
- drawable_type,
- drawable;
- for (var i = 0; i < collection_dict.drawables.length; i++) {
- drawable_dict = collection_dict.drawables[i];
- drawable_type = drawable_dict['obj_type'];
- // For backward compatibility:
- if (!drawable_type) {
- drawable_type = drawable_dict['track_type'];
- }
- drawable = addable_objects[ drawable_type ].prototype.from_dict( drawable_dict, collection );
- collection.add_drawable( drawable );
- }
- return collection;
- },
- /**
* Add a drawable to the end of the collection.
*/
add_drawable: function(drawable) {
@@ -1019,8 +1005,12 @@
/**
* A group of drawables that are moveable, visible.
*/
-var DrawableGroup = function(name, view, container, prefs) {
- DrawableCollection.call(this, "DrawableGroup", name, view, container, prefs, "group-handle");
+var DrawableGroup = function(view, container, obj_dict) {
+ extend(obj_dict, {
+ obj_type: "DrawableGroup",
+ drag_handle_class: "group-handle"
+ });
+ DrawableCollection.call(this, view, container, obj_dict);
// Set up containers/moving for group: register both container_div and content div as container
// because both are used as containers (container div to recognize container, content_div to
@@ -1035,6 +1025,23 @@
this.header_div.after(this.filters_manager.parent_div);
// For saving drawables' filter managers when group-level filtering is done:
this.saved_filters_managers = [];
+
+ // Add drawables.
+ if ('drawables' in obj_dict) {
+ this.unpack_drawables(obj_dict.drawables);
+ }
+
+ // Restore filters.
+ if ('filters' in obj_dict) {
+ // FIXME: Pass collection_dict to DrawableCollection/Drawable will make this easier.
+ var old_manager = this.filters_manager;
+ this.filters_manager = new FiltersManager(this, obj_dict['filters']);
+ old_manager.parent_div.replaceWith(this.filters_manager.parent_div);
+
+ if (obj_dict.filters.visible) {
+ this.setup_multitrack_filtering();
+ }
+ }
};
extend(DrawableGroup.prototype, Drawable.prototype, DrawableCollection.prototype, {
@@ -1249,7 +1256,10 @@
var new_track_name = "Composite Track of " + this.drawables.length + " tracks (" + drawables_names.join(", ") + ")";
// Replace this group with composite track.
- var composite_track = new CompositeTrack(new_track_name, this.view, this.view, this.drawables);
+ var composite_track = new CompositeTrack(this.view, this.view, {
+ name: new_track_name,
+ drawables: this.drawables
+ });
var index = this.container.replace_drawable(this, composite_track, true);
composite_track.request_draw();
},
@@ -1276,29 +1286,6 @@
return obj_dict;
},
- /**
- * Restore object from a dictionary created by to_dict()
- */
- from_dict: function(collection_dict, container) {
- var group = DrawableCollection.prototype.from_dict.call(this, collection_dict, container);
-
- // Add drawables to group's content div to make them visible.
- for (var i = 0; i < group.drawables.length; i++) {
- group.content_div.append(group.drawables[i].container_div);
- }
-
- // Handle filters.
- // FIXME: Pass collection_dict to DrawableCollection/Drawable will make this easier.
- var old_manager = group.filters_manager;
- group.filters_manager = new FiltersManager(group, collection_dict['filters']);
- old_manager.parent_div.replaceWith(group.filters_manager.parent_div);
-
- if (collection_dict.filters.visible) {
- group.setup_multitrack_filtering();
- }
-
- return group;
- },
request_draw: function(clear_after, force) {
for (var i = 0; i < this.drawables.length; i++) {
this.drawables[i].request_draw(clear_after, force);
@@ -1309,13 +1296,15 @@
/**
* View object manages complete viz view, including tracks and user interactions.
*/
-var View = function(container, title, vis_id, dbkey) {
- DrawableCollection.call(this, "View");
- this.container = container;
+var View = function(obj_dict) {
+ extend(obj_dict, {
+ obj_type: "View"
+ });
+ DrawableCollection.call(this, "View", obj_dict.container, obj_dict);
this.chrom = null;
- this.vis_id = vis_id;
- this.dbkey = dbkey;
- this.title = title;
+ this.vis_id = obj_dict.vis_id;
+ this.dbkey = obj_dict.dbkey;
+ this.title = obj_dict.title;
this.label_tracks = [];
this.tracks_to_be_redrawn = [];
this.max_low = 0;
@@ -1326,7 +1315,7 @@
// Deferred object that indicates when view's chrom data has been loaded.
this.load_chroms_deferred = null;
this.init();
- this.canvas_manager = new CanvasManager( container.get(0).ownerDocument );
+ this.canvas_manager = new CanvasManager( this.container.get(0).ownerDocument );
this.reset();
};
extend( View.prototype, DrawableCollection.prototype, {
@@ -2051,7 +2040,10 @@
}
// Create and init new track.
- var new_track = new current_track.constructor(track_name, view, container, "hda");
+ var new_track = new current_track.constructor(view, container, {
+ name: track_name,
+ hda_ldda: "hda"
+ });
new_track.init_for_tool_data();
new_track.change_mode(current_track.mode);
new_track.set_filters_manager(current_track.filters_manager.copy(new_track));
@@ -2479,7 +2471,7 @@
//
// Restore state from dict.
//
- if (obj_dict) {
+ if (obj_dict && 'filters' in obj_dict) { // Second condition needed for backward compatibility.
var
alpha_filter_name = ('alpha_filter' in obj_dict ? obj_dict.alpha_filter : null),
height_filter_name = ('height_filter' in obj_dict ? obj_dict.height_filter : null),
@@ -3060,18 +3052,21 @@
* -------> ReadTrack
* -------> VcfTrack
*/
-var Track = function(name, view, container, prefs, data_manager, data_url, data_query_wait) {
+var Track = function(view, container, obj_dict) {
// For now, track's container is always view.
- Drawable.call(this, name, view, container, {}, "draghandle");
+ extend(obj_dict, {
+ drag_handle_class: "draghandle"
+ });
+ Drawable.call(this, view, container, obj_dict);
//
// Attribute init.
//
- this.data_url = (data_url ? data_url : default_data_url);
+ this.data_url = ('data_url' in obj_dict ? obj_dict.data_url : default_data_url);
this.data_url_extra_params = {}
- this.data_query_wait = (data_query_wait ? data_query_wait : DEFAULT_DATA_QUERY_WAIT);
+ this.data_query_wait = ('data_query_wait' in obj_dict ? obj_dict.data_query_wait : DEFAULT_DATA_QUERY_WAIT);
this.dataset_check_url = converted_datasets_state_url;
- this.data_manager = (data_manager ? data_manager : new DataManager(DATA_CACHE_SIZE, this));
+ this.data_manager = ('data_manager' in obj_dict ? obj_dict.data_manager : new DataManager(DATA_CACHE_SIZE, this));
//
// Create content div, which is where track is displayed, and add to container if available.
@@ -3327,8 +3322,8 @@
predraw_init: function() {}
});
-var TiledTrack = function(name, view, container, prefs, filters_dict, tool_dict, data_manager) {
- Track.call(this, name, view, container, prefs, data_manager);
+var TiledTrack = function(view, container, obj_dict) {
+ Track.call(this, view, container, obj_dict);
var track = this,
view = track.view;
@@ -3337,9 +3332,9 @@
moveable(track.container_div, track.drag_handle_class, ".group", track);
// Attribute init.
- this.filters_manager = new FiltersManager(this, filters_dict);
+ this.filters_manager = new FiltersManager(this, ('filters' in obj_dict ? obj_dict.filters : null));
this.filters_available = false;
- this.tool = (tool_dict !== undefined && obj_length(tool_dict) > 0 ? new Tool(this, tool_dict) : undefined);
+ this.tool = ('tool' in obj_dict && obj_dict.tool ? new Tool(this, obj_dict.tool) : null);
this.tile_cache = new Cache(TILE_CACHE_SIZE);
if (this.header_div) {
@@ -3356,6 +3351,10 @@
this.header_div.after(this.dynamic_tool_div);
}
}
+
+ if (obj_dict.mode) {
+ this.change_mode(obj_dict.mode);
+ }
};
extend(TiledTrack.prototype, Drawable.prototype, Track.prototype, {
/**
@@ -3363,8 +3362,11 @@
*/
copy: function(container) {
// Create copy.
- var new_track = new this.constructor(this.name, this.view, container, this.hda_ldda, this.dataset_id, this.prefs,
- this.filters, this.tool, this.data_manager);
+ var obj_dict = this.to_dict();
+ extend(obj_dict, {
+ data_manager: this.data_manager
+ });
+ var new_track = new this.constructor(this.view, container, obj_dict);
// Misc. init and return.
new_track.change_mode(this.mode);
new_track.enabled = this.enabled;
@@ -3389,22 +3391,11 @@
"dataset_id": this.dataset_id,
"prefs": this.prefs,
"mode": this.mode,
- "filters": this.filters_manager.to_dict()
+ "filters": this.filters_manager.to_dict(),
+ "tool": null
};
},
/**
- * Restore object from a dictionary created by to_dict()
- */
- from_dict: function(track_dict, container) {
- var track = new this.constructor(
- track_dict.name, view, container, track_dict.hda_ldda, track_dict.dataset_id,
- track_dict.prefs, track_dict.filters, track_dict.tool);
- if (track_dict.mode) {
- track.change_mode(track_dict.mode);
- }
- return track;
- },
- /**
* Change track's mode.
*/
change_mode: function(new_mode) {
@@ -3767,7 +3758,11 @@
});
var LabelTrack = function (view, container) {
- Track.call(this, "label", view, container, false, {} );
+ var obj_dict = {
+ todo: "label",
+ todo: false
+ };
+ Track.call(this, view, container, obj_dict);
this.container_div.addClass( "label-track" );
};
extend(LabelTrack.prototype, Track.prototype, {
@@ -3800,21 +3795,18 @@
/**
* A tiled track composed of multiple other tracks.
*/
-var CompositeTrack = function(name, view, container, drawables) {
- TiledTrack.call(this, name, view, container);
+var CompositeTrack = function(view, container, obj_dict) {
+ TiledTrack.call(this, view, container, obj_dict);
// Init drawables; each drawable is a copy so that config/preferences
// are independent of each other. Also init left offset.
this.drawables = [];
this.left_offset = 0;
- if (drawables) {
- var
- ids = [],
- drawable;
- for (var i = 0; i < drawables.length; i++) {
- drawable = drawables[i];
- ids.push(drawable.dataset_id);
- this.drawables[i] = drawable.copy();
+ if ('drawables' in obj_dict) {
+ var drawable;
+ for (var i = 0; i < obj_dict.drawables.length; i++) {
+ drawable = obj_dict.drawables[i];
+ this.drawables[i] = object_from_template(drawable);
// Track's left offset is the max of all tracks.
if (drawable.left_offset > this.left_offset) {
@@ -3858,34 +3850,7 @@
*/
to_dict: DrawableCollection.prototype.to_dict,
add_drawable: DrawableCollection.prototype.add_drawable,
- /**
- * Restore object from a dictionary created by to_dict()
- */
- // TODO: unify with DrawableCollection.prototype.from_dict?
- from_dict: function(collection_dict, container) {
- var collection = new this.constructor( collection_dict.name, view,
- container, collection_dict.prefs,
- view.viewport_container, view);
- var drawable_dict,
- drawable_type,
- drawable;
- for (var i = 0; i < collection_dict.drawables.length; i++) {
- drawable_dict = collection_dict.drawables[i];
- drawable_type = drawable_dict['obj_type'];
- // For backward compatibility:
- if (!drawable_type) {
- drawable_type = drawable_dict['track_type'];
- }
- // No container for tracks so that it is not made visible.
- drawable = addable_objects[ drawable_type ].prototype.from_dict(drawable_dict);
- collection.add_drawable(drawable);
- }
-
- // HACKish: set mode using the first drawable. Should use config object and set mode from there.
- collection.set_display_modes(collection.drawables[0].display_modes, collection.drawables[0].mode);
-
- return collection;
- },
+ unpack_drawables: DrawableCollection.prototype.unpack_drawables,
change_mode: function(new_mode) {
TiledTrack.prototype.change_mode.call(this, new_mode);
for (var i = 0; i < this.drawables.length; i++) {
@@ -3919,8 +3884,9 @@
// FIXME: this function is similar to TiledTrack.draw_helper -- can the two be merged/refactored?
var track = this,
key = this._gen_tile_cache_key(width, w_scale, tile_index),
- tile_low = tile_index * TILE_SIZE * resolution,
- tile_high = tile_low + TILE_SIZE * resolution;
+ tile_bounds = this._get_tile_bounds(tile_index, resolution),
+ tile_low = tile_bounds[0],
+ tile_high = tile_bounds[1];
// Init kwargs if necessary to avoid having to check if kwargs defined.
if (!kwargs) { kwargs = {}; }
@@ -4036,7 +4002,9 @@
show_group: function() {
// Create group with individual tracks.
var
- group = new DrawableGroup(this.name, this.view, this.container),
+ group = new DrawableGroup(this.view, this.container, {
+ name: this.name
+ }),
track;
for (var i = 0; i < this.drawables.length; i++) {
track = this.drawables[i]
@@ -4154,11 +4122,11 @@
}
});
-var LineTrack = function (name, view, container, hda_ldda, dataset_id, prefs, filters, tool, data_manager) {
+var LineTrack = function (view, container, obj_dict) {
var track = this;
this.display_modes = ["Histogram", "Line", "Filled", "Intensity"];
this.mode = "Histogram";
- TiledTrack.call(this, name, view, container, prefs, filters, tool, data_manager);
+ TiledTrack.call(this, view, container, obj_dict);
// Cannot subset LineTrack data right now; see note in DataManager about using resolution in key
// to address this issue.
@@ -4168,9 +4136,9 @@
this.max_height_px = 400;
// Default height for new tracks, should be a defined constant?
this.height_px = 32;
- this.hda_ldda = hda_ldda;
- this.dataset_id = dataset_id;
- this.original_dataset_id = dataset_id;
+ this.hda_ldda = obj_dict.hda_ldda;
+ this.dataset_id = obj_dict.dataset_id;
+ this.original_dataset_id = this.dataset_id;
this.left_offset = 0;
// Define track configuration
@@ -4184,7 +4152,7 @@
{ key: 'mode', type: 'string', default_value: this.mode, hidden: true },
{ key: 'height', type: 'int', default_value: this.height_px, hidden: true }
],
- saved_values: prefs,
+ saved_values: obj_dict.prefs,
onchange: function() {
track.set_name(track.prefs.name);
track.vertical_range = track.prefs.max_value - track.prefs.min_value;
@@ -4319,7 +4287,7 @@
}
});
-var FeatureTrack = function(name, view, container, hda_ldda, dataset_id, prefs, filters, tool, data_manager) {
+var FeatureTrack = function(view, container, obj_dict) {
//
// Preinitialization: do things that need to be done before calling Track and TiledTrack
// initialization code.
@@ -4330,7 +4298,7 @@
//
// Initialization.
//
- TiledTrack.call(this, name, view, container, prefs, filters, tool, data_manager);
+ TiledTrack.call(this, view, container, obj_dict);
// Define and restore track configuration.
this.config = new DrawableConfig( {
@@ -4346,7 +4314,7 @@
options: [ { label: 'Line with arrows', value: 'fishbone' }, { label: 'Arcs', value: 'arcs' } ] },
{ key: 'mode', type: 'string', default_value: this.mode, hidden: true },
],
- saved_values: prefs,
+ saved_values: obj_dict.prefs,
onchange: function() {
track.set_name(track.prefs.name);
track.tile_cache.clear();
@@ -4358,9 +4326,9 @@
this.height_px = 0;
this.container_div.addClass( "feature-track" );
- this.hda_ldda = hda_ldda;
- this.dataset_id = dataset_id;
- this.original_dataset_id = dataset_id;
+ this.hda_ldda = obj_dict.hda_ldda;
+ this.dataset_id = obj_dict.dataset_id;
+ this.original_dataset_id = obj_dict.dataset_id;
this.show_labels_scale = 0.001;
this.showing_details = false;
this.summary_draw_height = 30;
@@ -4734,8 +4702,8 @@
},
});
-var VcfTrack = function(name, view, container, hda_ldda, dataset_id, prefs, filters, tool, data_manager) {
- FeatureTrack.call(this, name, view, container, hda_ldda, dataset_id, prefs, filters, tool, data_manager);
+var VcfTrack = function(view, container, obj_dict) {
+ FeatureTrack.call(this, view, container, obj_dict);
this.config = new DrawableConfig( {
track: this,
@@ -4747,7 +4715,7 @@
{ key: 'show_counts', label: 'Show summary counts', type: 'bool', default_value: true },
{ key: 'mode', type: 'string', default_value: this.mode, hidden: true },
],
- saved_values: prefs,
+ saved_values: obj_dict.prefs,
onchange: function() {
this.track.set_name(this.track.prefs.name);
this.track.tile_cache.clear();
@@ -4761,8 +4729,8 @@
extend(VcfTrack.prototype, Drawable.prototype, TiledTrack.prototype, FeatureTrack.prototype);
-var ReadTrack = function (name, view, container, hda_ldda, dataset_id, prefs, filters, data_manager) {
- FeatureTrack.call(this, name, view, container, hda_ldda, dataset_id, prefs, filters, data_manager);
+var ReadTrack = function (view, container, obj_dict) {
+ FeatureTrack.call(this, view, container, obj_dict);
var
block_color = get_random_color(),
@@ -4780,7 +4748,7 @@
{ key: 'histogram_max', label: 'Histogram maximum', type: 'float', default_value: null, help: 'Clear value to set automatically' },
{ key: 'mode', type: 'string', default_value: this.mode, hidden: true },
],
- saved_values: prefs,
+ saved_values: obj_dict.prefs,
onchange: function() {
this.track.set_name(this.track.prefs.name);
this.track.tile_cache.clear();
diff -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df -r 95876f2552d354bf88320521dcf5083f31147bf0 static/scripts/trackster_ui.js
--- a/static/scripts/trackster_ui.js
+++ b/static/scripts/trackster_ui.js
@@ -29,16 +29,24 @@
};
/**
- * Create object from a dictionary.
+ * Create new object from a template. A template can be either an object dictionary or an
+ * object itself.
*/
-var object_from_dict = function(track_dict, container) {
- var
- drawable_type = track_dict['obj_type'];
- // For backward compatibility:
- if (!drawable_type) {
- drawable_type = track_dict['track_type'];
+var object_from_template = function(template, container) {
+ if ('copy' in template) {
+ // Template is an object.
+ return template.copy(container);
}
- return addable_objects[ drawable_type ].prototype.from_dict(track_dict, container);
+ else {
+ // Template is a dictionary.
+ var
+ drawable_type = template['obj_type'];
+ // For backward compatibility:
+ if (!drawable_type) {
+ drawable_type = template['track_type'];
+ }
+ return new addable_objects[ drawable_type ](view, container, template);
+ }
};
/**
@@ -55,10 +63,10 @@
/**
* Create a complete Trackster visualization. Returns view.
*/
-var create_visualization = function(parent_elt, title, id, dbkey, viewport_config, drawables_config, bookmarks_config) {
+var create_visualization = function(view_config, viewport_config, drawables_config, bookmarks_config) {
// Create view.
- view = new View(parent_elt, title, id, dbkey);
+ view = new View(view_config);
view.editor = true;
$.when( view.load_chroms_deferred ).then(function() {
// Viewport config.
@@ -81,7 +89,7 @@
drawable_type,
drawable;
for (var i = 0; i < drawables_config.length; i++) {
- view.add_drawable( object_from_dict( drawables_config[i], view ) );
+ view.add_drawable( object_from_template( drawables_config[i], view ) );
}
}
diff -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df -r 95876f2552d354bf88320521dcf5083f31147bf0 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -168,8 +168,12 @@
$("#right-border").click(function() { view.resize_window(); });
%if config:
- view = create_visualization( $("#browser-container"), "${config.get('title') | h}",
- "${config.get('vis_id')}", "${config.get('dbkey')}",
+ view = create_visualization( {
+ container: $("#browser-container"),
+ title: "${config.get('title') | h}",
+ vis_id: "${config.get('vis_id')}",
+ dbkey: "${config.get('dbkey')}"
+ },
JSON.parse('${ h.to_json_string( config.get( 'viewport', dict() ) ) }'),
JSON.parse('${ h.to_json_string( config['tracks'] ).replace("'", "\\'") }'),
JSON.parse('${ h.to_json_string( config['bookmarks'] ) }')
@@ -225,7 +229,7 @@
$("#add-tracks-icon").click( function() { add_tracks(); } );
$("#add-group-icon").click( function() {
- view.add_drawable( new DrawableGroup("New Group", view, view) );
+ view.add_drawable( new DrawableGroup(view, view, { name: "New Group" }) );
});
$("#save-icon").click( function() {
commit/galaxy-central: greg: Do not allow conflicting datatypes included in a tool shed repository currently being installed to override datatypes already in the datatypes registry.
by Bitbucket 10 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a00a36e7cf4c/
changeset: a00a36e7cf4c
user: greg
date: 2012-02-10 22:33:19
summary: Do not allow conflicting datatypes included in a tool shed repository currently being installed to override datatypes already in the datatypes registry. Also, a bit of cleanup in handling datatypes included in repositories being activated / installed / deactivated / uninstalled. A bit more cleanup is still necessary.
affected #: 3 files
diff -r 62fc9e05383514811e5add9db5f9ad6560a06b20 -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -47,11 +47,13 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
- def load_datatypes( self, root_dir=None, config=None, deactivate=False ):
+ def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
- Parse a datatypes XML file located at root_dir/config. If deactivate is True, an installed
- tool shed repository that includes proprietary datatypes is being deactivated, so appropriate
- loaded datatypes will be removed from the registry.
+ Parse a datatypes XML file located at root_dir/config. If deactivate is True, an installed tool shed
+ repository that includes proprietary datatypes is being deactivated, so appropriate loaded datatypes
+ will be removed from the registry. The value of override will be False when a tool shed repository is
+ being installed. Since installation is occurring after the datatypes registry has been initialized, its
+ contents cannot be overridden by new introduced conflicting data types.
"""
def __import_module( full_path, datatype_module ):
sys.path.insert( 0, full_path )
@@ -116,9 +118,13 @@
self.datatype_elems.append( elem )
if extension and extension in self.datatypes_by_extension and deactivate:
# We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
+ # TODO: Handle deactivating datatype converters, etc before removing from self.datatypes_by_extension.
self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
del self.datatypes_by_extension[ extension ]
- elif extension and ( dtype or type_extension ):
+ can_process_datatype = False
+ else:
+ can_process_datatype = ( extension and ( dtype or type_extension ) ) and ( extension not in self.datatypes_by_extension or override )
+ if can_process_datatype:
if dtype:
fields = dtype.split( ':' )
datatype_module = fields[0]
@@ -194,6 +200,11 @@
else:
if elem not in self.display_app_containers:
self.display_app_containers.append( elem )
+ elif not deactivate:
+ # A new tool shed repository that contains proprietary datatypes is being installed, and since installation
+ # is occurring after the datatypes registry has been initialized, its contents cannot be overridden by new
+ # introduced conflicting data types.
+ self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
except Exception, e:
if deactivate:
self.log.warning( "Error deactivating datatype with extension '%s': %s" % ( extension, str( e ) ) )
@@ -233,14 +244,22 @@
self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
else:
# See if we have a conflicting sniffer already loaded.
+ conflict = False
for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
if sniffer_class.__class__ == aclass.__class__:
# We have a conflicting sniffer, so replace the one previously loaded.
- del self.sniff_order[ conflict_loc ]
- self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
+ conflict = True
+ if override:
+ del self.sniff_order[ conflict_loc ]
+ self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
break
- self.sniff_order.append( aclass )
- self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ if conflict:
+ if override:
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ else:
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
except Exception, exc:
if deactivate:
self.log.warning( "Error deactivating sniffer for datatype '%s': %s" % ( dtype, str( exc ) ) )
diff -r 62fc9e05383514811e5add9db5f9ad6560a06b20 -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -527,12 +527,14 @@
if display_path:
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
-def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False ):
+def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
"""
Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
add information to appropriate elements that will enable proprietary datatype class modules, datatypes converters
- and display application to be discovered and properly imported by the datatypes registry. This method is used by
- the InstallManager, which does not have access to trans.
+ and display application to be discovered and properly imported by the datatypes registry. The value of override will
+ be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
+ has been initialized, its contents cannot be overridden by conflicting data types. This method is used by the InstallManager,
+ which does not have access to trans.
"""
tree = util.parse_xml( datatypes_config )
datatypes_config_root = tree.getroot()
@@ -597,7 +599,7 @@
else:
proprietary_datatypes_config = datatypes_config
# Load proprietary datatypes
- app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate )
+ app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
try:
os.unlink( proprietary_datatypes_config )
except:
@@ -728,7 +730,8 @@
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load data types required by tools.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir )
+ override = not new_install
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, override=override )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
diff -r 62fc9e05383514811e5add9db5f9ad6560a06b20 -r a00a36e7cf4c3b73a0ee2a411b065213f19cf3df lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -616,9 +616,6 @@
break
for elem in sections_to_load:
trans.app.toolbox.load_section_tag_set( elem, trans.app.toolbox.tool_panel, tool_path )
- if repository.includes_datatypes:
- # Load proprietary datatypes.
- load_datatype_items( trans.app, repository, relative_install_dir )
if uninstalled:
message = 'The <b>%s</b> repository has been reinstalled.' % repository.name
else:
@@ -656,8 +653,6 @@
if tool_section:
trans.app.toolbox.tool_panel[ section_key ] = tool_section
log.debug( "Appended reactivated tool to section: %s" % tool_section.name )
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
- load_datatype_items( trans.app, repository, relative_install_dir )
@web.expose
@web.require_admin
def manage_repository( self, trans, **kwd ):
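The new override flag controls whether a datatype already registered for an extension may be replaced: it stays True for the initial registry load and for reactivation, and is set to False while a repository is being installed after startup, so the install cannot clobber datatypes that are already live. A condensed sketch of that gate (standalone; the real registry also tracks converters, sniffers and display applications per extension):

import logging
logging.basicConfig()
log = logging.getLogger( __name__ )

def register_datatype( registry, extension, datatype_class, override=True ):
    """Add a datatype unless doing so would silently replace an existing one."""
    if extension in registry and not override:
        # Installing after the registry was initialized: keep the existing datatype and warn.
        log.warning( "Ignoring conflicting datatype with extension '%s'." % extension )
        return False
    registry[ extension ] = datatype_class()
    return True

class Bed( object ): pass
class ConflictingBed( object ): pass

registry = {}
register_datatype( registry, 'bed', Bed )                               # initial load
register_datatype( registry, 'bed', ConflictingBed, override=False )   # install-time conflict: ignored
assert isinstance( registry[ 'bed' ], Bed )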
commit/galaxy-central: greg: Fixes for handling proprietary datatypes included in installed tool shed repositories - import proprietary class modules within the registry.
by Bitbucket 10 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/62fc9e053835/
changeset: 62fc9e053835
user: greg
date: 2012-02-10 20:52:41
summary: Fixes for handling proprietary datatypes included in installed tool shed repositories - import proprietary class modules within the registry.
affected #: 2 files
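The heart of this change is importing a repository's proprietary datatype class module by temporarily prepending its directory to sys.path, under a lock because sys.path is process-global state shared by all threads. A minimal sketch of that import helper, mirroring the __import_module nested function and the lock usage in the diff below (the standalone function name is mine):

import sys
import threading

_import_lock = threading.Lock()

def import_proprietary_module( full_path, datatype_module ):
    """Import datatype_module from full_path without permanently altering sys.path."""
    _import_lock.acquire( True )
    try:
        sys.path.insert( 0, full_path )
        try:
            return __import__( datatype_module )
        finally:
            # Always restore sys.path, even if the import raises.
            sys.path.pop( 0 )
    finally:
        _import_lock.release()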
diff -r ce17eb369f95ccaf2c8331e1a69fe34b1203cff1 -r 62fc9e05383514811e5add9db5f9ad6560a06b20 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -1,8 +1,7 @@
"""
Provides mapping between extensions and datatypes, mime-types, etc.
"""
-import os, tempfile
-import logging
+import os, sys, tempfile, threading, logging
import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo, binary, assembly, ngsindex, wsf
import galaxy.util
from galaxy.util.odict import odict
@@ -20,8 +19,7 @@
self.datatype_converters = odict()
# Converters defined in local datatypes_conf.xml
self.converters = []
- # Converters defined in datatypes_conf.xml included
- # in installed tool shed repositories.
+ # Converters defined in datatypes_conf.xml included in installed tool shed repositories.
self.proprietary_converters = []
self.converter_deps = {}
self.available_tracks = []
@@ -44,17 +42,24 @@
# The 'default' display_path defined in local datatypes_conf.xml
self.display_applications_path = None
self.inherit_display_application_by_class = []
+ # Keep a list of imported proprietary datatype class modules.
+ self.imported_modules = []
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
- def load_datatypes( self, root_dir=None, config=None, imported_modules=None, deactivate=False ):
+ def load_datatypes( self, root_dir=None, config=None, deactivate=False ):
"""
- Parse a datatypes XML file located at root_dir/config. If imported_modules is received, it
- is a list of imported datatypes class files included in an installed tool shed repository.
- If deactivate is received as True, an installed tool shed repository that includes proprietary
- datatypes is being deactivated, so relevant loaded datatypes will be removed from the registry.
+ Parse a datatypes XML file located at root_dir/config. If deactivate is True, an installed
+ tool shed repository that includes proprietary datatypes is being deactivated, so appropriate
+ loaded datatypes will be removed from the registry.
"""
+ def __import_module( full_path, datatype_module ):
+ sys.path.insert( 0, full_path )
+ imported_module = __import__( datatype_module )
+ sys.path.pop( 0 )
+ return imported_module
if root_dir and config:
+ handling_proprietary_datatypes = False
# Parse datatypes_conf.xml
tree = galaxy.util.parse_xml( config )
root = tree.getroot()
@@ -73,6 +78,11 @@
if not self.display_applications_path:
self.display_path_attr = registration.get( 'display_path', 'display_applications' )
self.display_applications_path = os.path.join( root_dir, self.display_path_attr )
+ # Proprietary datatype's <registration> tag may have special attributes, proprietary_converter_path and proprietary_display_path.
+ proprietary_converter_path = registration.get( 'proprietary_converter_path', None )
+ proprietary_display_path = registration.get( 'proprietary_display_path', None )
+ if proprietary_converter_path or proprietary_display_path and not handling_proprietary_datatypes:
+ handling_proprietary_datatypes = True
for elem in registration.findall( 'datatype' ):
try:
extension = elem.get( 'extension', None )
@@ -81,6 +91,12 @@
mimetype = elem.get( 'mimetype', None )
display_in_upload = elem.get( 'display_in_upload', False )
make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
+ # Proprietary datatypes included in installed tool shed repositories will include two special attributes
+ # (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
+ proprietary_path = elem.get( 'proprietary_path', None )
+ proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
+ if proprietary_path or proprietary_datatype_module and not handling_proprietary_datatypes:
+ handling_proprietary_datatypes = True
if deactivate:
# We are deactivating an installed tool shed repository, so eliminate the
# datatype elem from the in-memory list of datatype elems.
@@ -108,12 +124,21 @@
datatype_module = fields[0]
datatype_class_name = fields[1]
datatype_class = None
- if imported_modules:
- # See if one of the imported modules contains the datatype class name.
- for imported_module in imported_modules:
+ if proprietary_path and proprietary_datatype_module:
+ # We need to change the value of sys.path, so do it in a way that is thread-safe.
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ imported_module = __import_module( proprietary_path, proprietary_datatype_module )
+ if imported_module not in self.imported_modules:
+ self.imported_modules.append( imported_module )
if hasattr( imported_module, datatype_class_name ):
datatype_class = getattr( imported_module, datatype_class_name )
- break
+ except Exception, e:
+ full_path = os.path.join( full_path, proprietary_datatype_module )
+ self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
+ finally:
+ lock.release()
if datatype_class is None:
# The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
fields = datatype_module.split( '.' )
@@ -130,14 +155,14 @@
self.datatypes_by_extension[ extension ] = datatype_class()
if mimetype is None:
# Use default mime type as per datatype spec
- mimetype = self.datatypes_by_extension[extension].get_mime()
- self.mimetypes_by_extension[extension] = mimetype
+ mimetype = self.datatypes_by_extension[ extension ].get_mime()
+ self.mimetypes_by_extension[ extension ] = mimetype
if hasattr( datatype_class, "get_track_type" ):
self.available_tracks.append( extension )
if display_in_upload:
self.upload_file_formats.append( extension )
# Max file size cut off for setting optional metadata
- self.datatypes_by_extension[extension].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
for converter in elem.findall( 'converter' ):
# Build the list of datatype converters which will later be loaded into the calling app's toolbox.
converter_config = converter.get( 'file', None )
@@ -148,7 +173,8 @@
self.converter_deps[extension] = {}
self.converter_deps[extension][target_datatype] = depends_on.split(',')
if converter_config and target_datatype:
- if imported_modules:
+ #if imported_modules:
+ if proprietary_converter_path:
self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
else:
self.converters.append( ( converter_config, extension, target_datatype ) )
@@ -161,7 +187,8 @@
mimetype = composite_file.get( 'mimetype', None )
self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
for display_app in elem.findall( 'display' ):
- if imported_modules:
+ #if imported_modules:
+ if proprietary_display_path:
if elem not in self.proprietary_display_app_containers:
self.proprietary_display_app_containers.append( elem )
else:
@@ -185,9 +212,10 @@
datatype_module = fields[0]
datatype_class_name = fields[1]
module = None
- if imported_modules:
+ #if imported_modules:
+ if handling_proprietary_datatypes:
# See if one of the imported modules contains the datatype class name.
- for imported_module in imported_modules:
+ for imported_module in self.imported_modules:
if hasattr( imported_module, datatype_class_name ):
module = imported_module
break
@@ -197,13 +225,6 @@
for comp in datatype_module.split( '.' )[ 1: ]:
module = getattr( module, comp )
aclass = getattr( module, datatype_class_name )()
- # See if we have a conflicting sniffer already loaded.
- conflict_loc = None
- conflict = False
- for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
- if sniffer_class.__class__ == aclass.__class__:
- conflict = True
- break
if deactivate:
for sniffer_class in self.sniff_order:
if sniffer_class.__class__ == aclass.__class__:
@@ -211,19 +232,21 @@
break
self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
else:
- if conflict:
- # We have a conflicting sniffer, so replace the one previously loaded.
- del self.sniff_order[ conflict_loc ]
- self.sniff_order.append( aclass )
- self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
- else:
- self.sniff_order.append( aclass )
- self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ # See if we have a conflicting sniffer already loaded.
+ for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
+ if sniffer_class.__class__ == aclass.__class__:
+ # We have a conflicting sniffer, so replace the one previously loaded.
+ del self.sniff_order[ conflict_loc ]
+ self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
+ break
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
except Exception, exc:
if deactivate:
self.log.warning( "Error deactivating sniffer for datatype '%s': %s" % ( dtype, str( exc ) ) )
else:
self.log.warning( "Error appending sniffer for datatype '%s' to sniff_order: %s" % ( dtype, str( exc ) ) )
+ self.upload_file_formats.sort()
# Persist the xml form of the registry into a temporary file so that it
# can be loaded from the command line by tools and set_metadata processing.
self.to_xml_file()
@@ -385,8 +408,7 @@
app's toolbox.
"""
if installed_repository_dict:
- # Load converters defined by datatypes_conf.xml
- # included in installed tool shed repository.
+ # Load converters defined by datatypes_conf.xml included in installed tool shed repository.
converters = self.proprietary_converters
else:
# Load converters defined by local datatypes_conf.xml.
@@ -438,10 +460,9 @@
If deactivate is False, add display applications from self.display_app_containers or
self.proprietary_display_app_containers to appropriate datatypes. If deactivate is
True, eliminates relevant display applications from appropriate datatypes.
- """
+ """
if installed_repository_dict:
- # Load display applications defined by datatypes_conf.xml
- # included in installed tool shed repository.
+ # Load display applications defined by datatypes_conf.xml included in installed tool shed repository.
datatype_elems = self.proprietary_display_app_containers
else:
# Load display applications defined by local datatypes_conf.xml.
@@ -452,7 +473,8 @@
display_file = display_app.get( 'file', None )
if installed_repository_dict:
display_path = installed_repository_dict[ 'display_path' ]
- config_path = os.path.join( display_path, display_file )
+ display_file_head, display_file_tail = os.path.split( display_file )
+ config_path = os.path.join( display_path, display_file_tail )
else:
config_path = os.path.join( self.display_applications_path, display_file )
try:
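The sniffer handling in the registry hunks above boils down to a replace-or-append rule: if a sniffer of the same class is already registered, drop the old one before appending the new one. A minimal standalone sketch of that rule, with illustrative names that are not part of the commit:

import logging

log = logging.getLogger(__name__)

class SnifferRegistry(object):
    def __init__(self):
        self.sniff_order = []

    def register(self, aclass):
        # If a sniffer of the same class was loaded earlier, replace it;
        # otherwise simply append the new sniffer to the end of sniff_order.
        for conflict_loc, sniffer_class in enumerate(self.sniff_order):
            if sniffer_class.__class__ == aclass.__class__:
                del self.sniff_order[conflict_loc]
                log.debug("Replaced conflicting sniffer")
                break
        self.sniff_order.append(aclass)

class FastqSniffer(object):
    pass

registry = SnifferRegistry()
registry.register(FastqSniffer())
registry.register(FastqSniffer())  # second registration replaces the first
assert len(registry.sniff_order) == 1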
diff -r ce17eb369f95ccaf2c8331e1a69fe34b1203cff1 -r 62fc9e05383514811e5add9db5f9ad6560a06b20 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,4 @@
-import os, sys, tempfile, shutil, subprocess, threading, logging
+import os, tempfile, shutil, subprocess, logging
from datetime import date, datetime, timedelta
from time import strftime
from galaxy import util
@@ -511,7 +511,7 @@
metadata = repository.metadata
datatypes_config = metadata.get( 'datatypes_config', None )
if datatypes_config:
- converter_path, display_path = load_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
@@ -527,15 +527,13 @@
if display_path:
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
-def load_datatypes( app, datatypes_config, relative_install_dir, deactivate=False ):
- # This method is used by the InstallManager, which does not have access to trans.
- def __import_module( relative_path, datatype_module ):
- sys.path.insert( 0, relative_path )
- imported_module = __import__( datatype_module )
- sys.path.pop( 0 )
- return imported_module
- imported_modules = []
- # Parse datatypes_config.
+def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False ):
+ """
+ Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
+ add information to appropriate elements that will enable proprietary datatype class modules, datatypes converters
+ and display application to be discovered and properly imported by the datatypes registry. This method is used by
+ the InstallManager, which does not have access to trans.
+ """
tree = util.parse_xml( datatypes_config )
datatypes_config_root = tree.getroot()
# Path to datatype converters
@@ -551,6 +549,7 @@
# <datatype_file name="gmap.py"/>
# <datatype_file name="metagenomics.py"/>
# </datatype_files>
+ # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
for elem in datatype_files.findall( 'datatype_file' ):
datatype_file_name = elem.get( 'name', None )
if datatype_file_name:
@@ -563,76 +562,91 @@
break
break
if datatype_class_modules:
- # Import each of the datatype class modules.
+ registration = datatypes_config_root.find( 'registration' )
+ converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
+ if converter_path:
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ registration.attrib[ 'proprietary_display_path' ] = display_path
for relative_path_to_datatype_file_name in datatype_class_modules:
relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
- registration = datatypes_config_root.find( 'registration' )
- # Get the module by parsing the <datatype> tag.
for elem in registration.findall( 'datatype' ):
- # A 'type' attribute is currently required. The attribute
- # should be something like one of the following:
+ # Handle 'type' attribute which should be something like one of the following:
# type="gmap:GmapDB"
# type="galaxy.datatypes.gmap:GmapDB"
dtype = elem.get( 'type', None )
if dtype:
fields = dtype.split( ':' )
- datatype_module = fields[ 0 ]
- if datatype_module.find( '.' ) >= 0:
- # Handle the case where datatype_module is "galaxy.datatypes.gmap"
- datatype_module = datatype_module.split( '.' )[ -1 ]
- datatype_class_name = fields[ 1 ]
- # We need to change the value of sys.path, so do it in a way that is thread-safe.
- lock = threading.Lock()
- lock.acquire( True )
- try:
- imported_module = __import_module( relative_head, datatype_module )
- if imported_module not in imported_modules:
- imported_modules.append( imported_module )
- except Exception, e:
- log.debug( "Exception importing datatypes code file %s: %s" % ( str( relative_path_to_datatype_file_name ), str( e ) ) )
- finally:
- lock.release()
- # Handle data type converters and display applications.
- for elem in registration.findall( 'datatype' ):
- if not converter_path:
- # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
- # if it is not already set. This requires developers to place all converters in the
- # same subdirectory within the repository hierarchy.
- for converter in elem.findall( 'converter' ):
- converter_config = converter.get( 'file', None )
- if converter_config:
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == converter_config:
- converter_path = root
- break
- if converter_path:
- break
- if not display_path:
- # If any of the <datatype> tag sets contain <display> tags, set the display_path
- # if it is not already set. This requires developers to place all display acpplications
- # in the same subdirectory within the repository hierarchy.
- for display_app in elem.findall( 'display' ):
- display_config = display_app.get( 'file', None )
- if display_config:
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == display_config:
- display_path = root
- break
- if display_path:
- break
- if converter_path and display_path:
- break
+ proprietary_datatype_module = fields[ 0 ]
+ if proprietary_datatype_module.find( '.' ) >= 0:
+ # Handle the case where datatype_module is "galaxy.datatypes.gmap".
+ proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
+ # The value of proprietary_path must be an absolute path due to job_working_directory.
+ elem.attrib[ 'proprietary_path' ] = os.path.abspath( relative_head )
+ elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
+
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ fd, proprietary_datatypes_config = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % util.xml_to_string( registration ) )
+ os.write( fd, '%s' % util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
else:
- # The repository includes a dataypes_conf.xml file, but no code file that
- # contains data type classes. This implies that the data types in datayptes_conf.xml
- # are all subclasses of data types that are in the distribution.
- imported_modules = []
+ proprietary_datatypes_config = datatypes_config
# Load proprietary datatypes
- app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_modules=imported_modules, deactivate=deactivate )
+ app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate )
+ try:
+ os.unlink( proprietary_datatypes_config )
+ except:
+ pass
+ return converter_path, display_path
+def get_converter_and_display_paths( registration_elem, relative_install_dir ):
+ """
+ Find the relative path to data type converters and display
+ applications included in installed tool shed repositories.
+ """
+ converter_path = None
+ display_path = None
+ for elem in registration_elem.findall( 'datatype' ):
+ if not converter_path:
+ # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
+ # if it is not already set. This requires developers to place all converters in the
+ # same subdirectory within the repository hierarchy.
+ for converter in elem.findall( 'converter' ):
+ converter_config = converter.get( 'file', None )
+ if converter_config:
+ relative_head, relative_tail = os.path.split( converter_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == relative_tail:
+ # The value of converter_path must be absolute due to job_working_directory.
+ converter_path = os.path.abspath( root )
+ break
+ if converter_path:
+ break
+ if not display_path:
+ # If any of the <datatype> tag sets contain <display> tags, set the display_path
+ # if it is not already set. This requires developers to place all display acpplications
+ # in the same subdirectory within the repository hierarchy.
+ for display_app in elem.findall( 'display' ):
+ display_config = display_app.get( 'file', None )
+ if display_config:
+ relative_head, relative_tail = os.path.split( display_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == relative_tail:
+ # The value of display_path must be absolute due to job_working_directory.
+ display_path = os.path.abspath( root )
+ break
+ if display_path:
+ break
+ if converter_path and display_path:
+ break
return converter_path, display_path
def load_repository_contents( app, repository_name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
current_working_dir, tmp_name, tool_shed=None, tool_section=None, shed_tool_conf=None, new_install=True, dist_to_shed=False ):
@@ -714,7 +728,7 @@
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load data types required by tools.
- converter_path, display_path = load_datatypes( app, datatypes_config, relative_install_dir )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
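The docstring added to alter_config_and_load_prorietary_datatypes above describes the general approach: annotate the <registration> element of the repository's datatypes_conf.xml with converter and display paths, then write the result to a temporary config that the datatypes registry can load. A very rough Python 2 sketch of that idea, using only the standard library and illustrative names (this is not the shed_util implementation, and it assumes the config layout shown in the diff):

import os, tempfile
from xml.etree import ElementTree

def write_annotated_config(datatypes_config, converter_path, display_path):
    # Parse the repository's datatypes_conf.xml and annotate <registration>.
    tree = ElementTree.parse(datatypes_config)
    registration = tree.getroot().find('registration')
    if converter_path:
        registration.attrib['proprietary_converter_path'] = converter_path
    if display_path:
        registration.attrib['proprietary_display_path'] = display_path
    # Write the annotated element to a temporary config for the registry.
    fd, tmp_config = tempfile.mkstemp()
    os.write(fd, '<?xml version="1.0"?>\n<datatypes>\n')
    os.write(fd, ElementTree.tostring(registration))
    os.write(fd, '\n</datatypes>\n')
    os.close(fd)
    os.chmod(tmp_config, 0644)
    return tmp_config

The function in the diff additionally copies the <sniffers> section into the temporary file and unlinks it once load_datatypes() has consumed it.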
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fe7b7d7bda41/
changeset: fe7b7d7bda41
branch: dynamic_options_memory_fix
user: natefoo
date: 2012-02-10 19:19:06
summary: closed branch dynamic_options_memory_fix
affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/changeset/ce17eb369f95/
changeset: ce17eb369f95
user: natefoo
date: 2012-02-10 19:19:41
summary: merge
affected #: 0 files
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/eb7a31e38a4c/
changeset: eb7a31e38a4c
branch: dynamic_options_memory_fix
user: jmchilton
date: 2012-02-07 17:35:17
summary: Patch dynamic_options.py to read just the first megabyte of an
input dataset (if it is larger than that). Otherwise Galaxy will
attempt to read the entire file into memory, which can cause crashes
and freezes without the user even selecting a bad input (for
instance, if a large FASTQ file is the topmost history item and a txt
input is sought, Galaxy will freeze as soon as the tool is clicked,
because it tries to load the options for the top item immediately
by default).
Though the implementation is not as clean, I am restricting it to a
megabyte instead of a number of lines/options in case the topmost
history item has no line breaks; in that case the same problem could
occur even if the input were restricted to a reasonable number of
lines (say 100). (A consolidated sketch of the size-capped read
appears after the diffs below.)
affected #: 1 file
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r eb7a31e38a4cf9968d115984ab2ded908d3ddc9e lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -500,7 +500,16 @@
dataset = other_values.get( self.dataset_ref_name, None )
assert dataset is not None, "Required dataset '%s' missing from input" % self.dataset_ref_name
if not dataset: return [] #no valid dataset in history
- options = self.parse_file_fields( open( dataset.file_name ) )
+ # Ensure parsing dynamic options does not consume more than a megabyte worth memory.
+ file_size = os.path.getsize( path )
+ if os.path.getsize( path ) < 1048576:
+ options = self.parse_file_fields( open( path ) )
+ else:
+ # Pass just the first megabyte to parse_file_fields.
+ import StringIO
+ log.warn( "Attempting to load options from large file, reading just first megabyte" )
+ contents = open( path, 'r' ).read( megabyte )
+ options = self.parse_file_fields( StringIO.StringIO( contents ) )
elif self.tool_data_table:
options = self.tool_data_table.get_fields()
else:
https://bitbucket.org/galaxy/galaxy-central/changeset/53e55b49a24a/
changeset: 53e55b49a24a
branch: dynamic_options_memory_fix
user: jmchilton
date: 2012-02-07 17:38:00
summary: Fix a typo made with last commit.
affected #: 1 file
diff -r eb7a31e38a4cf9968d115984ab2ded908d3ddc9e -r 53e55b49a24a8e9a2f00b1c594c4e54c226e8865 lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -501,6 +501,7 @@
assert dataset is not None, "Required dataset '%s' missing from input" % self.dataset_ref_name
if not dataset: return [] #no valid dataset in history
# Ensure parsing dynamic options does not consume more than a megabyte worth memory.
+ path = dataset.file_name
file_size = os.path.getsize( path )
if os.path.getsize( path ) < 1048576:
options = self.parse_file_fields( open( path ) )
https://bitbucket.org/galaxy/galaxy-central/changeset/046157d1ed8a/
changeset: 046157d1ed8a
branch: dynamic_options_memory_fix
user: jmchilton
date: 2012-02-07 17:50:22
summary: One more fixed typo.
affected #: 1 file
diff -r 53e55b49a24a8e9a2f00b1c594c4e54c226e8865 -r 046157d1ed8af7ff178c6b1edee9512deccbc0ce lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -509,7 +509,7 @@
# Pass just the first megabyte to parse_file_fields.
import StringIO
log.warn( "Attempting to load options from large file, reading just first megabyte" )
- contents = open( path, 'r' ).read( megabyte )
+ contents = open( path, 'r' ).read( 1048576 )
options = self.parse_file_fields( StringIO.StringIO( contents ) )
elif self.tool_data_table:
options = self.tool_data_table.get_fields()
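Taken together, the three commits above cap how much of a history dataset is read when building dynamic options. A consolidated Python 2 sketch of that pattern; parse_file_fields and the dataset path are placeholders, not the real dynamic_options interface:

import os
import StringIO  # Python 2, as in the diffs above

MEGABYTE = 1048576

def load_options(path, parse_file_fields):
    if os.path.getsize(path) < MEGABYTE:
        return parse_file_fields(open(path))
    # Pass only the first megabyte so a huge (possibly newline-free) history
    # item cannot pull the whole file into memory.
    contents = open(path, 'r').read(MEGABYTE)
    return parse_file_fields(StringIO.StringIO(contents))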
commit/galaxy-central: jgoecks: Trackster: save and restore group/multi-track filters.
by Bitbucket 10 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/73ba96b2b5a8/
changeset: 73ba96b2b5a8
user: jgoecks
date: 2012-02-10 17:13:55
summary: Trackster: save and restore group/multi-track filters.
affected #: 1 file
diff -r 60d72f4bd1ea1930487e7eedfc3ce78372c00e9c -r 73ba96b2b5a82ee2a292e8b533838c81e30f552b static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -1034,7 +1034,7 @@
this.filters_manager = new FiltersManager(this);
this.header_div.after(this.filters_manager.parent_div);
// For saving drawables' filter managers when group-level filtering is done:
- this.saved_filters_managers = null;
+ this.saved_filters_managers = [];
};
extend(DrawableGroup.prototype, Drawable.prototype, DrawableCollection.prototype, {
@@ -1060,33 +1060,15 @@
// TODO: update tipsy text.
if (group.filters_manager.visible()) {
// Hiding filters.
- group.filters_manager.clear_filters();
-
- // Restore filter managers.
+ group.filters_manager.clear_filters();
+ group._restore_filter_managers();
// TODO: maintain current filter by restoring and setting saved manager's
// settings to current/shared manager's settings.
- // TODO: need to restore filter managers when moving drawable outside group.
- for (var i = 0; i < group.drawables.length; i++) {
- group.drawables[i].filters_manager = group.saved_filters_managers[i];
- }
- group.saved_filters_managers = null;
+ // TODO: need to restore filter managers when moving drawable outside group.
}
else {
// Showing filters.
-
- // Save tracks' managers and set up shared manager.
- if (group.filters_manager.filters.length > 0) {
- // For all tracks, save current filter manager and set manager to shared (this object's) manager.
- group.saved_filters_managers = [];
- for (var i = 0; i < group.drawables.length; i++) {
- drawable = group.drawables[i];
- group.saved_filters_managers.push(drawable.filters_manager);
- drawable.filters_manager = group.filters_manager;
- }
-
- //TODO: hide filters icons for each drawable.
- }
- group.filters_manager.init_filters();
+ group.setup_multitrack_filtering();
group.request_draw(true);
}
group.filters_manager.toggle();
@@ -1229,6 +1211,33 @@
}
},
/**
+ * Restore individual track filter managers.
+ */
+ _restore_filter_managers: function() {
+ for (var i = 0; i < this.drawables.length; i++) {
+ this.drawables[i].filters_manager = this.saved_filters_managers[i];
+ }
+ this.saved_filters_managers = [];
+ },
+ /**
+ *
+ */
+ setup_multitrack_filtering: function() {
+ // Save tracks' managers and set up shared manager.
+ if (this.filters_manager.filters.length > 0) {
+ // For all tracks, save current filter manager and set manager to shared (this object's) manager.
+ this.saved_filters_managers = [];
+ for (var i = 0; i < this.drawables.length; i++) {
+ drawable = this.drawables[i];
+ this.saved_filters_managers.push(drawable.filters_manager);
+ drawable.filters_manager = this.filters_manager;
+ }
+
+ //TODO: hide filters icons for each drawable?
+ }
+ this.filters_manager.init_filters();
+ },
+ /**
* Replace group with a single composite track that includes all group's tracks.
*/
show_composite_track: function() {
@@ -1252,6 +1261,21 @@
DrawableCollection.prototype.remove_drawable.call(this, drawable);
this.update_icons();
},
+ to_dict: function() {
+ // If filters are visible, need to restore original filter managers before converting to dict.
+ if (this.filters_manager.visible()) {
+ this._restore_filter_managers();
+ }
+
+ var obj_dict = extend(DrawableCollection.prototype.to_dict.call(this), { "filters": this.filters_manager.to_dict() });
+
+ // Setup multi-track filtering again.
+ if (this.filters_manager.visible()) {
+ this.setup_multitrack_filtering();
+ }
+
+ return obj_dict;
+ },
/**
* Restore object from a dictionary created by to_dict()
*/
@@ -1262,6 +1286,17 @@
for (var i = 0; i < group.drawables.length; i++) {
group.content_div.append(group.drawables[i].container_div);
}
+
+ // Handle filters.
+ // FIXME: Pass collection_dict to DrawableCollection/Drawable will make this easier.
+ var old_manager = group.filters_manager;
+ group.filters_manager = new FiltersManager(group, collection_dict['filters']);
+ old_manager.parent_div.replaceWith(group.filters_manager.parent_div);
+
+ if (collection_dict.filters.visible) {
+ group.setup_multitrack_filtering();
+ }
+
return group;
},
request_draw: function(clear_after, force) {
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3bd8bed55631/
changeset: 3bd8bed55631
user: jgoecks
date: 2012-02-10 15:23:33
summary: Tophat wrapper modifications: (a) remove parameter junction_filter and (b) change the default value of max_multihits to 20.
affected #: 2 files
diff -r abae2caf3da253353fbdcc077a7ce3cd64a38e54 -r 3bd8bed55631728a05a9d873ebff70b74acbfd6e tools/ngs_rna/tophat_wrapper.py
--- a/tools/ngs_rna/tophat_wrapper.py
+++ b/tools/ngs_rna/tophat_wrapper.py
@@ -27,7 +27,6 @@
help='The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart.' )
parser.add_option( '-I', '--max-intron-length', dest='max_intron_length',
help='The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read.' )
- parser.add_option( '-F', '--junction_filter', dest='junction_filter', help='Filter out junctions supported by too few alignments (number of reads divided by average depth of coverage)' )
parser.add_option( '-g', '--max_multihits', dest='max_multihits', help='Maximum number of alignments to be allowed' )
parser.add_option( '', '--initial-read-mismatches', dest='initial_read_mismatches', help='Number of mismatches allowed in the initial read mapping' )
parser.add_option( '', '--seg-mismatches', dest='seg_mismatches', help='Number of mismatches allowed in each segment alignment for reads mapped independently' )
@@ -150,8 +149,6 @@
opts += ' -m %s' % options.splice_mismatches
opts += ' -i %s' % options.min_intron_length
opts += ' -I %s' % options.max_intron_length
- if float( options.junction_filter ) != 0.0:
- opts += ' -F %s' % options.junction_filter
opts += ' -g %s' % options.max_multihits
# Custom junctions options.
if options.gene_model_annotations:
diff -r abae2caf3da253353fbdcc077a7ce3cd64a38e54 -r 3bd8bed55631728a05a9d873ebff70b74acbfd6e tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -34,7 +34,6 @@
-m $singlePaired.sParams.splice_mismatches
-i $singlePaired.sParams.min_intron_length
-I $singlePaired.sParams.max_intron_length
- -F $singlePaired.sParams.junction_filter
-g $singlePaired.sParams.max_multihits
--min-segment-intron $singlePaired.sParams.min_segment_intron
--max-segment-intron $singlePaired.sParams.max_segment_intron
@@ -96,7 +95,6 @@
-m $singlePaired.pParams.splice_mismatches
-i $singlePaired.pParams.min_intron_length
-I $singlePaired.pParams.max_intron_length
- -F $singlePaired.pParams.junction_filter
-g $singlePaired.pParams.max_multihits
--min-segment-intron $singlePaired.pParams.min_segment_intron
--max-segment-intron $singlePaired.pParams.max_segment_intron
@@ -203,8 +201,8 @@
<param name="max_deletion_length" type="integer" value="3" label="Max deletion length." help="The maximum deletion length." /></when></conditional>
- <param name="junction_filter" type="float" value="0.15" label="Minimum isoform fraction: filter out junctions supported by too few alignments (number of reads divided by average depth of coverage)" help="0.0 to 1.0 (0 to turn off)" />
- <param name="max_multihits" type="integer" value="40" label="Maximum number of alignments to be allowed" />
+alignments (number of reads divided by average depth of coverage)" help="0.0 to 1.0 (0 to turn off)" />
+ <param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" /><param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" />
@@ -310,8 +308,7 @@
<param name="max_deletion_length" type="integer" value="3" label="Max deletion length." help="The maximum deletion length." /></when></conditional>
- <param name="junction_filter" type="float" value="0.15" label="Minimum isoform fraction: filter out junctions supported by too few alignments (number of reads divided by average depth of coverage)" help="0.0 to 1.0 (0 to turn off)" />
- <param name="max_multihits" type="integer" value="40" label="Maximum number of alignments to be allowed" />
+ <param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" /><param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" />
@@ -532,7 +529,6 @@
<param name="splice_mismatches" value="0"/><param name="min_intron_length" value="70"/><param name="max_intron_length" value="500000"/>
- <param name="junction_filter" value="0.15"/><param name="max_multihits" value="40"/><param name="min_segment_intron" value="50" /><param name="max_segment_intron" value="500000" />
@@ -578,7 +574,6 @@
<param name="splice_mismatches" value="0"/><param name="min_intron_length" value="70"/><param name="max_intron_length" value="500000"/>
- <param name="junction_filter" value="0.15"/><param name="max_multihits" value="40"/><param name="min_segment_intron" value="50" /><param name="max_segment_intron" value="500000" />
https://bitbucket.org/galaxy/galaxy-central/changeset/3a9fecf3be20/
changeset: 3a9fecf3be20
user: jgoecks
date: 2012-02-10 15:52:01
summary: Trackster: filter bug fix and abstract filter display control.
affected #: 3 files
diff -r 3bd8bed55631728a05a9d873ebff70b74acbfd6e -r 3a9fecf3be20e2413e8993738312246695fda2d3 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -355,7 +355,8 @@
'name': collection_dict.get( 'name', 'dummy' ),
'obj_type': collection_dict[ 'obj_type' ],
'drawables': drawables,
- 'prefs': collection_dict.get( 'prefs', [] )
+ 'prefs': collection_dict.get( 'prefs', [] ),
+ 'filters': collection_dict.get( 'filters', {} )
}
def encode_dbkey( dbkey ):
diff -r 3bd8bed55631728a05a9d873ebff70b74acbfd6e -r 3a9fecf3be20e2413e8993738312246695fda2d3 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -252,7 +252,7 @@
"hda_ldda": hda_ldda,
"dataset_id": trans.security.encode_id( dataset.id ),
"prefs": {},
- "filters": track_data_provider.get_filters(),
+ "filters": { 'filters' : track_data_provider.get_filters() },
"tool": get_tool_def( trans, dataset )
}
return track
@@ -671,7 +671,8 @@
"name": collection_json.get( 'name', '' ),
"obj_type": collection_json[ 'obj_type' ],
"drawables": unpacked_drawables,
- "prefs": collection_json.get( 'prefs' , [] )
+ "prefs": collection_json.get( 'prefs' , [] ),
+ "filters": collection_json.get( 'filters', None )
}
# TODO: unpack and validate bookmarks:
diff -r 3bd8bed55631728a05a9d873ebff70b74acbfd6e -r 3a9fecf3be20e2413e8993738312246695fda2d3 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -1032,8 +1032,7 @@
// Set up filters.
this.filters_manager = new FiltersManager(this);
- this.filters_div = this.filters_manager.parent_div;
- this.header_div.after(this.filters_div);
+ this.header_div.after(this.filters_manager.parent_div);
// For saving drawables' filter managers when group-level filtering is done:
this.saved_filters_managers = null;
};
@@ -1059,7 +1058,7 @@
css_class: "filters-icon",
on_click_fn: function(group) {
// TODO: update tipsy text.
- if (group.filters_div.is(":visible")) {
+ if (group.filters_manager.visible()) {
// Hiding filters.
group.filters_manager.clear_filters();
@@ -1090,7 +1089,7 @@
group.filters_manager.init_filters();
group.request_draw(true);
}
- group.filters_div.toggle();
+ group.filters_manager.toggle();
}
},
Drawable.prototype.action_icons_def[2]
@@ -2421,7 +2420,6 @@
this.track = track;
this.alpha_filter = null;
this.height_filter = null;
- this.visible = false;
this.filters = [];
//
@@ -2489,6 +2487,11 @@
};
extend(FiltersManager.prototype, {
+ // HTML manipulation and inspection.
+ show: function() { this.parent_div.show(); },
+ hide: function() { this.parent_div.hide(); },
+ toggle: function() { this.parent_div.toggle(); },
+ visible: function() { return this.parent_div.is(":visible"); },
/**
* Returns dictionary for manager.
*/
@@ -3071,13 +3074,13 @@
css_class: "filters-icon",
on_click_fn: function(drawable) {
// TODO: update tipsy text.
- if (drawable.filters_div.is(":visible")) {
+ if (drawable.filters_manager.visible()) {
drawable.filters_manager.clear_filters();
}
else {
drawable.filters_manager.init_filters();
}
- drawable.filters_div.toggle();
+ drawable.filters_manager.toggle();
}
},
// Toggle track tool.
@@ -3337,8 +3340,7 @@
*/
set_filters_manager: function(filters_manager) {
this.filters_manager = filters_manager;
- this.filters_div = this.filters_manager.parent_div;
- this.header_div.after(this.filters_div);
+ this.header_div.after(this.filters_manager.parent_div);
},
/**
* Returns representation of object in a dictionary for easy saving.
@@ -4405,7 +4407,7 @@
if (track.filters_available !== filters_available) {
track.filters_available = filters_available;
if (!track.filters_available) {
- track.filters_div.hide();
+ track.filters_manager.hide();
}
track.update_icons();
}
https://bitbucket.org/galaxy/galaxy-central/changeset/60d72f4bd1ea/
changeset: 60d72f4bd1ea
user: jgoecks
date: 2012-02-10 15:52:14
summary: Merge
affected #: 2 files
diff -r 3a9fecf3be20e2413e8993738312246695fda2d3 -r 60d72f4bd1ea1930487e7eedfc3ce78372c00e9c tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -37,6 +37,7 @@
fastqc_data.txt per_base_gc_content.png per_base_sequence_content.png sequence_length_distribution.png warning.png
"""
+ serr = ''
dummy,tlog = tempfile.mkstemp(prefix='rgFastQClog')
sout = open(tlog, 'w')
fastq = os.path.basename(self.opts.input)
@@ -45,12 +46,18 @@
cl.append('-f %s' % self.opts.informat)
if self.opts.contaminants <> None :
cl.append('-c %s' % self.opts.contaminants)
- cl.append(self.opts.input)
+ # patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
+ # use a symlink in a temporary directory so that the FastQC report reflects the history input file name
+ fastqinfilename = os.path.basename(self.opts.inputfilename).replace(' ','_')
+ link_name = os.path.join(self.opts.outputdir, fastqinfilename)
+ os.symlink(self.opts.input, link_name)
+ cl.append(link_name)
p = subprocess.Popen(' '.join(cl), shell=True, stderr=sout, stdout=sout, cwd=self.opts.outputdir)
- return_value = p.wait()
+ retval = p.wait()
sout.close()
runlog = open(tlog,'r').readlines()
os.unlink(tlog)
+ os.unlink(link_name)
flist = os.listdir(self.opts.outputdir) # fastqc plays games with its output directory name. eesh
odpath = None
for f in flist:
@@ -65,7 +72,8 @@
rep = open(hpath,'r').readlines() # for our new html file but we need to insert our stuff after the <body> tag
except:
pass
- if hpath == None:
+ if hpath == None:
+ serr = '\n'.join(runlog)
res = ['## odpath=%s: No output found in %s. Output for the run was:<pre>\n' % (odpath,hpath),]
res += runlog
res += ['</pre>\n',
@@ -74,7 +82,7 @@
'It is also possible that the log shows that fastqc is not installed?<br/>\n',
'If that is the case, please tell the relevant Galaxy administrator that it can be snarfed from<br/>\n',
'http://www.bioinformatics.bbsrc.ac.uk/projects/fastqc/<br/>\n',]
- return res
+ return res,1,serr
self.fix_fastqcimages(odpath)
flist = os.listdir(self.opts.outputdir) # these have now been fixed
excludefiles = ['tick.png','warning.png','fastqc_icon.png','error.png']
@@ -84,7 +92,7 @@
rep[i] = rep[i].replace('Images/','')
html = self.fix_fastqc(rep,flist,runlog)
- return html
+ return html,retval,serr
@@ -129,6 +137,7 @@
if __name__ == '__main__':
op = optparse.OptionParser()
op.add_option('-i', '--input', default=None)
+ op.add_option('-j', '--inputfilename', default=None)
op.add_option('-o', '--htmloutput', default=None)
op.add_option('-d', '--outputdir', default="/tmp/shortread")
op.add_option('-f', '--informat', default='fastq')
@@ -141,9 +150,12 @@
if not os.path.exists(opts.outputdir):
os.makedirs(opts.outputdir)
f = FastQC(opts)
- html = f.run_fastqc()
+ html,retval,serr = f.run_fastqc()
f = open(opts.htmloutput, 'w')
f.write(''.join(html))
f.close()
+ if retval <> 0:
+ print >> sys.stderr, serr # indicate failure
+
diff -r 3a9fecf3be20e2413e8993738312246695fda2d3 -r 60d72f4bd1ea1930487e7eedfc3ce78372c00e9c tools/rgenetics/rgFastQC.xml
--- a/tools/rgenetics/rgFastQC.xml
+++ b/tools/rgenetics/rgFastQC.xml
@@ -1,7 +1,7 @@
-<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.3">
+<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.4"><description>using FastQC from Babraham</description><command interpreter="python">
- rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
+ rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -j $input_file.name -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
#if $contaminants.dataset and str($contaminants) > ''
-c "$contaminants"
#end if
@@ -16,7 +16,7 @@
help="tab delimited file with 2 columns: name and sequence. For example: Illumina Small RNA RT Primer CAAGCAGAAGACGGCATACGA"/></inputs><outputs>
- <data format="html" name="html_file" label="${out_prefix}.html" />
+ <data format="html" name="html_file" label="${out_prefix}_${on_string}.html" /></outputs><tests><test>
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9a568ceeb9f5/
changeset: 9a568ceeb9f5
user: fubar
date: 2012-02-10 00:40:00
summary: Updates to FastQC wrapper
1) Improvement suggested by bwlang: pass the history file name to FastQC as the input file name so it appears in the outputs instead of the previously unhelpful internal Galaxy filename.
2) Lack of HTML output from FastQC, or a non-zero return code, now triggers job failure; the outputs may still be there, but at least the job goes red. (A rough sketch of both behaviors appears after the diff below.)
affected #: 2 files
diff -r abae2caf3da253353fbdcc077a7ce3cd64a38e54 -r 9a568ceeb9f5e281e7ae3c09d6049ac7d16183d4 tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -37,6 +37,7 @@
fastqc_data.txt per_base_gc_content.png per_base_sequence_content.png sequence_length_distribution.png warning.png
"""
+ serr = ''
dummy,tlog = tempfile.mkstemp(prefix='rgFastQClog')
sout = open(tlog, 'w')
fastq = os.path.basename(self.opts.input)
@@ -45,12 +46,18 @@
cl.append('-f %s' % self.opts.informat)
if self.opts.contaminants <> None :
cl.append('-c %s' % self.opts.contaminants)
- cl.append(self.opts.input)
+ # patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
+ # use a symlink in a temporary directory so that the FastQC report reflects the history input file name
+ fastqinfilename = os.path.basename(self.opts.inputfilename).replace(' ','_')
+ link_name = os.path.join(self.opts.outputdir, fastqinfilename)
+ os.symlink(self.opts.input, link_name)
+ cl.append(link_name)
p = subprocess.Popen(' '.join(cl), shell=True, stderr=sout, stdout=sout, cwd=self.opts.outputdir)
- return_value = p.wait()
+ retval = p.wait()
sout.close()
runlog = open(tlog,'r').readlines()
os.unlink(tlog)
+ os.unlink(link_name)
flist = os.listdir(self.opts.outputdir) # fastqc plays games with its output directory name. eesh
odpath = None
for f in flist:
@@ -65,7 +72,8 @@
rep = open(hpath,'r').readlines() # for our new html file but we need to insert our stuff after the <body> tag
except:
pass
- if hpath == None:
+ if hpath == None:
+ serr = '\n'.join(runlog)
res = ['## odpath=%s: No output found in %s. Output for the run was:<pre>\n' % (odpath,hpath),]
res += runlog
res += ['</pre>\n',
@@ -74,7 +82,7 @@
'It is also possible that the log shows that fastqc is not installed?<br/>\n',
'If that is the case, please tell the relevant Galaxy administrator that it can be snarfed from<br/>\n',
'http://www.bioinformatics.bbsrc.ac.uk/projects/fastqc/<br/>\n',]
- return res
+ return res,1,serr
self.fix_fastqcimages(odpath)
flist = os.listdir(self.opts.outputdir) # these have now been fixed
excludefiles = ['tick.png','warning.png','fastqc_icon.png','error.png']
@@ -84,7 +92,7 @@
rep[i] = rep[i].replace('Images/','')
html = self.fix_fastqc(rep,flist,runlog)
- return html
+ return html,retval,serr
@@ -129,6 +137,7 @@
if __name__ == '__main__':
op = optparse.OptionParser()
op.add_option('-i', '--input', default=None)
+ op.add_option('-j', '--inputfilename', default=None)
op.add_option('-o', '--htmloutput', default=None)
op.add_option('-d', '--outputdir', default="/tmp/shortread")
op.add_option('-f', '--informat', default='fastq')
@@ -141,9 +150,12 @@
if not os.path.exists(opts.outputdir):
os.makedirs(opts.outputdir)
f = FastQC(opts)
- html = f.run_fastqc()
+ html,retval,serr = f.run_fastqc()
f = open(opts.htmloutput, 'w')
f.write(''.join(html))
f.close()
+ if retval <> 0:
+ print >> sys.stderr, serr # indicate failure
+
diff -r abae2caf3da253353fbdcc077a7ce3cd64a38e54 -r 9a568ceeb9f5e281e7ae3c09d6049ac7d16183d4 tools/rgenetics/rgFastQC.xml
--- a/tools/rgenetics/rgFastQC.xml
+++ b/tools/rgenetics/rgFastQC.xml
@@ -1,7 +1,7 @@
-<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.3">
+<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.4"><description>using FastQC from Babraham</description><command interpreter="python">
- rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
+ rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -j $input_file.name -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
#if $contaminants.dataset and str($contaminants) > ''
-c "$contaminants"
#end if
@@ -16,7 +16,7 @@
help="tab delimited file with 2 columns: name and sequence. For example: Illumina Small RNA RT Primer CAAGCAGAAGACGGCATACGA"/></inputs><outputs>
- <data format="html" name="html_file" label="${out_prefix}.html" />
+ <data format="html" name="html_file" label="${out_prefix}_${on_string}.html" /></outputs><tests><test>