galaxy-dev
October 2008: 4 participants, 57 discussions
20 Oct '08
details: http://www.bx.psu.edu/hg/galaxy/rev/960820cccaaa
changeset: 1549:960820cccaaa
user: Dan Blankenberg <dan@bx.psu.edu>
date: Tue Oct 07 11:58:32 2008 -0400
description:
Users can copy datasets between their histories.
3 file(s) affected in this change:
lib/galaxy/web/controllers/dataset.py
templates/dataset/copy_view.mako
templates/dataset/edit_attributes.mako
diffs (164 lines):
diff -r ae341e281c89 -r 960820cccaaa lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Mon Oct 06 13:34:51 2008 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Tue Oct 07 11:58:32 2008 -0400
@@ -155,3 +155,67 @@
if self._undelete( trans, id ):
return "OK"
raise "Error undeleting"
+
+
+ @web.expose
+ def copy_datasets( self, trans, source_dataset_ids = "", target_history_ids = "", new_history_name="", do_copy = False ):
+ user = trans.get_user()
+ history = trans.get_history()
+ create_new_history = False
+ if source_dataset_ids:
+ if not isinstance( source_dataset_ids, list ):
+ source_dataset_ids = source_dataset_ids.split( "," )
+ source_dataset_ids = map( int, source_dataset_ids )
+ else:
+ source_dataset_ids = []
+ if target_history_ids:
+ if not isinstance( target_history_ids, list ):
+ target_history_ids = target_history_ids.split( "," )
+ if "create_new_history" in target_history_ids:
+ create_new_history = True
+ target_history_ids.remove( "create_new_history" )
+ target_history_ids = map( int, target_history_ids )
+ else:
+ target_history_ids = []
+ done_msg = error_msg = ""
+ if do_copy:
+ invalid_datasets = 0
+ if not source_dataset_ids or not ( target_history_ids or create_new_history ):
+ error_msg = "You must provide both source datasets and target histories."
+ if create_new_history:
+ target_history_ids.append( "create_new_history" )
+ else:
+ if create_new_history:
+ new_history = trans.app.model.History()
+ if new_history_name:
+ new_history.name = new_history_name
+ new_history.user = user
+ new_history.flush()
+ target_history_ids.append( new_history.id )
+ if user:
+ target_histories = [ hist for hist in map( trans.app.model.History.get, target_history_ids ) if ( hist is not None and hist.user == user )]
+ else:
+ target_histories = [ history ]
+ if len( target_histories ) != len( target_history_ids ):
+ error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
+ for data in map( trans.app.model.HistoryDatasetAssociation.get, source_dataset_ids ):
+ if data is None:
+ error_msg = error_msg + "You tried to copy a non-existent dataset. "
+ invalid_datasets += 1
+ elif data.history != history:
+ error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
+ invalid_datasets += 1
+ else:
+ for hist in target_histories:
+ hist.add_dataset( data.copy( copy_children = True ) )
+ trans.app.model.flush()
+ done_msg = "%i datasets copied to %i histories." % ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) )
+ history.refresh()
+ elif create_new_history:
+ target_history_ids.append( "create_new_history" )
+ source_datasets = history.active_datasets
+ target_histories = [history]
+ if user:
+ target_histories = user.histories
+
+ return trans.fill_template( "/dataset/copy_view.mako", source_dataset_ids = source_dataset_ids, target_history_ids = target_history_ids, source_datasets = source_datasets, target_histories = target_histories, new_history_name = new_history_name, done_msg = done_msg, error_msg = error_msg )
diff -r ae341e281c89 -r 960820cccaaa templates/dataset/copy_view.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/dataset/copy_view.mako Tue Oct 07 11:58:32 2008 -0400
@@ -0,0 +1,66 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">Copy History Items</%def>
+
+%if error_msg:
+<p>
+<div class="errormessage">${error_msg}</div>
+<div style="clear: both"></div>
+</p>
+%endif
+%if done_msg:
+<p>
+<div class="donemessage">${done_msg}</div>
+<div style="clear: both"></div>
+</p>
+%endif
+<p>
+<div class="toolForm">
+ <form>
+ <div style="float: left; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Source History Items</div>
+ <div class="toolFormBody">
+ %for data in source_datasets:
+ <%
+ checked = ""
+ if data.id in source_dataset_ids:
+ checked = " checked"
+ %>
+ <div class="form-row"><input type="checkbox" name="source_dataset_ids" value="${data.id}"${checked}> ${data.hid}: ${data.name}</div>
+ %endfor
+ </div>
+ </div>
+ <div style="float: right; width: 50%; padding: 0px 0px 0px 0px;">
+ <div class="toolFormTitle">Target Histories</div>
+ <div class="toolFormBody">
+ %for i, hist in enumerate( target_histories ):
+ <%
+ checked = ""
+ if hist.id in target_history_ids:
+ checked = " checked"
+ cur_history_text = ""
+ if hist == trans.get_history():
+ cur_history_text = " <strong>(current history)</strong>"
+ %>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="${hist.id}"${checked}> ${i + 1}${cur_history_text}: ${hist.name}</div>
+ %endfor
+ %if trans.get_user():
+ <%
+ checked = ""
+ if "create_new_history" in target_history_ids:
+ checked = " checked"
+ %>
+ <br>
+ <div class="form-row"><input type="checkbox" name="target_history_ids" value="create_new_history"${checked}>New history named: <input type="textbox" name="new_history_name" value="${new_history_name}"></div>
+ %endif
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row" align="center"><input type="submit" class="primary-button" name="do_copy" value="Copy History Items"></div>
+ </form>
+</div>
+</p>
+<div style="clear: both"></div>
+<p>
+<div class="infomessage">Select any number of source history items and any number of target histories and click on "Copy History Items" to add a copy of each selected dataset to each selected history.</div>
+<div style="clear: both"></div>
+</p>
diff -r ae341e281c89 -r 960820cccaaa templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Mon Oct 06 13:34:51 2008 -0400
+++ b/templates/dataset/edit_attributes.mako Tue Oct 07 11:58:32 2008 -0400
@@ -1,5 +1,5 @@
<%inherit file="/base.mako"/>
-<%def name="title()">Your saved histories</%def>
+<%def name="title()">History Item Attributes</%def>
<%def name="datatype( dataset, datatypes )">
@@ -132,3 +132,12 @@
</form>
</div>
</div>
+
+ <p>
+ <div class="toolForm">
+ <div class="toolFormTitle">Copy History Item</div>
+ <div class="toolFormBody">
+ Click <a href="${h.url_for( controller='dataset', action='copy_datasets', source_dataset_ids=data.id, target_history_ids=data.history_id )}" target="galaxy_main">here</a> to make a copy of this history item.
+ </div>
+ </div>
+ </p>
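As a rough usage sketch of the new action (not from the changeset; the host and ids are hypothetical, while the parameter names come straight from the copy_datasets signature above, in the Python 2 style of the surrounding code):

    # Hypothetical example: drive the new copy_datasets action over HTTP.
    # The ids below are invented; source_dataset_ids, target_history_ids and
    # do_copy are the request parameters the controller method above reads.
    import urllib

    params = urllib.urlencode( {
        'source_dataset_ids' : '1,2',   # HDAs in the current history
        'target_history_ids' : '3',     # destination history
        'do_copy'            : 'True',  # any non-empty value triggers the copy branch
    } )
    print urllib.urlopen( 'http://localhost:8080/dataset/copy_datasets?' + params ).read()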
details: http://www.bx.psu.edu/hg/galaxy/rev/fdbf15ea1f8a
changeset: 1551:fdbf15ea1f8a
user: Dan Blankenberg <dan@bx.psu.edu>
date: Wed Oct 08 12:00:16 2008 -0400
description:
Add ability to undelete histories.
2 file(s) affected in this change:
lib/galaxy/web/controllers/root.py
templates/history/list.mako
diffs (188 lines):
diff -r 64c0734ff262 -r fdbf15ea1f8a lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Oct 07 15:21:46 2008 -0400
+++ b/lib/galaxy/web/controllers/root.py Wed Oct 08 12:00:16 2008 -0400
@@ -371,6 +371,42 @@
refresh_frames=['history'])
@web.expose
+ def history_undelete( self, trans, id=[], **kwd):
+ """Undeletes a list of histories, ensures that histories are owned by current user"""
+ history_names = []
+ errors = []
+ ok_msg = ""
+ if id:
+ if not isinstance( id, list ):
+ id = id.split( "," )
+ user = trans.get_user()
+ for hid in id:
+ try:
+ int( hid )
+ except:
+ errors.append( "Invalid history: %s" % str( hid ) )
+ continue
+ history = self.app.model.History.get( hid )
+ if history:
+ if history.user != user:
+ errors.append( "History does not belong to current user." )
+ continue
+ if history.purged:
+ errors.append( "History has already been purged and can not be undeleted." )
+ continue
+ history_names.append( history.name )
+ history.deleted = False
+ else:
+ errors.append( "Not able to find history %s." % str( hid ) )
+ trans.log_event( "History id %s marked as undeleted" % str(hid) )
+ self.app.model.flush()
+ if history_names:
+ ok_msg = "Histories (%s) have been undeleted." % ", ".join( history_names )
+ else:
+ errors.append( "You must select at least one history to undelete." )
+ return self.history_available( trans, id=','.join( id ), show_deleted=True, ok_msg = ok_msg, error_msg = " ".join( errors ) )
+
+ @web.expose
def clear_history( self, trans ):
"""Clears the history for a user"""
history = trans.get_history()
@@ -414,7 +450,7 @@
@web.expose
@web.require_login( "work with multiple histories" )
- def history_available( self, trans, id=None, as_xml=False, **kwd ):
+ def history_available( self, trans, id=[], do_operation = "view", show_deleted = False, ok_msg = "", error_msg="", as_xml=False, **kwd ):
"""
List all available histories
"""
@@ -422,11 +458,21 @@
trans.response.set_content_type('text/xml')
return trans.fill_template( "/history/list_as_xml.mako" )
if not isinstance( id, list ):
- id = [ id ]
+ id = id.split( "," )
trans.log_event( "History id %s available" % str( id ) )
+
+ history_operations = dict( share=self.history_share, rename=self.history_rename, delete=self.history_delete, undelete=self.history_undelete )
+
+ if do_operation in history_operations:
+ return history_operations[do_operation]( trans, id=id, show_deleted=show_deleted, ok_msg=ok_msg, error_msg=error_msg, **kwd )
+
return trans.fill_template( "/history/list.mako", ids=id,
user=trans.get_user(),
- current_history=trans.get_history() )
+ current_history=trans.get_history(),
+ show_deleted=util.string_as_bool( show_deleted ),
+ ok_msg=ok_msg, error_msg=error_msg )
+
+
@web.expose
def history_import( self, trans, id=None, confirm=False, **kwd ):
diff -r 64c0734ff262 -r fdbf15ea1f8a templates/history/list.mako
--- a/templates/history/list.mako Tue Oct 07 15:21:46 2008 -0400
+++ b/templates/history/list.mako Wed Oct 08 12:00:16 2008 -0400
@@ -1,67 +1,65 @@
<%inherit file="/base.mako"/>
<%def name="title()">Your saved histories</%def>
-<%def name="javascripts()">
-${parent.javascripts()}
-<script type="text/javascript">
- ## FIXME: This depends on javascript, could be moved into controller
- function OnSubmitForm()
- {
- if(document.history_actions.operation[0].checked == true)
- {
- document.history_actions.action = "${h.url_for( action="history_share") }";
- }
- else if(document.history_actions.operation[1].checked == true)
- {
-
- document.history_actions.action = "${h.url_for( action="history_rename") }";
- }
- else if(document.history_actions.operation[2].checked == true)
- {
- if (confirm("Are you sure you want to delete these histories?"))
- {
- document.history_actions.action = "${h.url_for( action="history_delete" )}";
- }
- }
-
- return true;
- }
-</script>
-</%def>
-
+%if error_msg:
+<p>
+<div class="errormessage">${error_msg}</div>
+<div style="clear: both"></div>
+</p>
+%endif
+%if ok_msg:
+<p>
+<div class="donemessage">${ok_msg}</div>
+<div style="clear: both"></div>
+</p>
+%endif
+
%if user.histories:
- <h1>Stored Histories</h1>
- <form name="history_actions" onSubmit="return OnSubmitForm();" method="post" >
+ <h1 style="margin-bottom:0px;">Stored Histories</h1>
+ %if show_deleted:
+ <div><a href="${h.url_for( action='history_available', id=','.join( ids ), show_deleted=False )}">hide deleted</a></div>
+ %else:
+ <div><a href="${h.url_for( action='history_available', id=','.join( ids ), show_deleted=True )}">show deleted</a></div>
+ %endif
+ <form name="history_actions" action="${h.url_for( action='history_available')}" method="post" >
<table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
- <tr class="header" align="center"><td></td><td>Name</td><td>Size</td><td>Last modified</td><td>Actions</td></tr>
+ <tr class="header" align="center"><td>Name</td><td>Size</td><td>Last modified</td><td>Actions</td></tr>
%for history in user.histories:
- %if not( history.deleted ):
+ %if ( show_deleted and not history.purged ) or not( history.deleted ):
<tr>
- <td><input type=checkbox name="id" value="${history.id}"
+ <td>
+ <input type=checkbox name="id" value="${history.id}"
%if str(history.id) in ids:
checked
%endif
- ></td><td>${history.name}
- %if history.deleted:
- (deleted)
+ >${history.name}
+ %if history == trans.get_history():
+ (current history)
%endif
</td>
<td>${len(history.active_datasets)}</td>
<td>${str(history.update_time)[:19]}</td>
<td>
+ %if not history.deleted:
<a href="${h.url_for( action='history_rename', id=history.id )}">rename</a><br />
<a href="${h.url_for( action='history_switch', id=history.id )}">switch to</a><br />
- <a href="${h.url_for( action='history_delete', id=history.id )}" confirm="Are you sure you want to delete this history?">delete</a>
+ <a href="${h.url_for( action='history_delete', id=history.id )}" confirm="Are you sure you want to delete this history?">delete</a><br />
+ %else:
+ <a href="${h.url_for( action='history_undelete', id=history.id )}">undelete</a><br />
+ %endif
</td>
</tr>
%endif
%endfor
<tr><th colspan="100%">Action</th></tr>
- <tr><td colspan="100%" align="center"><input type="radio" name="operation" value="1" checked>Share <input type="radio" name="operation" value="2">Rename <input type="radio" name="operation" value="3">Delete </td></tr>
+ <tr><td colspan="100%" align="center"><input type="radio" name="do_operation" value="share" checked>Share <input type="radio" name="do_operation" value="rename">Rename <input type="radio" name="do_operation" value="delete">Delete
+ %if show_deleted:
+ <input type="radio" name="do_operation" value="undelete">Undelete
+ %endif
+ </td></tr>
<tr><td colspan="100%" align="center"><input type="submit" name="submit" value="Perform Action"></td></tr>
</table>
</form>
%else:
You have no stored histories
%endif
-
\ No newline at end of file
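A similar sketch for the new do_operation dispatch (again with hypothetical host and ids; "undelete" is routed to history_undelete through the history_operations dict added in the diff above):

    # Hypothetical example: undelete two histories through the reworked
    # history_available entry point (see the history_operations dict above).
    import urllib

    params = urllib.urlencode( {
        'id'           : '4,7',       # comma-separated history ids
        'do_operation' : 'undelete',  # dispatched to history_undelete
        'show_deleted' : 'True',
    } )
    print urllib.urlopen( 'http://localhost:8080/root/history_available?' + params ).read()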
20 Oct '08
details: http://www.bx.psu.edu/hg/galaxy/rev/64c0734ff262
changeset: 1550:64c0734ff262
user: Greg Von Kuster <greg@bx.psu.edu>
date: Tue Oct 07 15:21:46 2008 -0400
description:
Use only 1 underlying executable ( data_source.py ) for data source tools. A new tag set is added to the data source tool configs to handle translation of request param names sent by remote apps ( something like <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" /> ).
21 file(s) affected in this change:
lib/galaxy/jobs/__init__.py
lib/galaxy/tools/__init__.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
tool_conf.xml.sample
tools/data_source/biomart.py
tools/data_source/biomart.xml
tools/data_source/biomart_filter.py
tools/data_source/biomart_test.xml
tools/data_source/data_source.py
tools/data_source/epigraph.py
tools/data_source/epigraph_code.py
tools/data_source/epigraph_import.xml
tools/data_source/flymine.xml
tools/data_source/flymine_filter_code.py
tools/data_source/intermine.py
tools/data_source/ucsc_tablebrowser.py
tools/data_source/ucsc_tablebrowser.xml
tools/data_source/ucsc_tablebrowser_archaea.xml
tools/data_source/ucsc_tablebrowser_code.py
tools/data_source/ucsc_tablebrowser_test.xml
diffs (1016 lines):
diff -r 960820cccaaa -r 64c0734ff262 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Tue Oct 07 11:58:32 2008 -0400
+++ b/lib/galaxy/jobs/__init__.py Tue Oct 07 15:21:46 2008 -0400
@@ -270,6 +270,10 @@
incoming['userEmail'] = userEmail
# Build params, done before hook so hook can use
param_dict = self.tool.build_param_dict( incoming, inp_data, out_data )
+ # Certain tools require tasks to be completed prior to job execution
+ # ( this used to be performed in the "exec_before_job" hook, but hooks are deprecated ).
+ if self.tool.tool_type is not None:
+ out_data = self.tool.exec_before_job( self.queue.app, inp_data, out_data, param_dict )
# Run the before queue ("exec_before_job") hook
self.tool.call_hook( 'exec_before_job', self.queue.app, inp_data=inp_data,
out_data=out_data, tool=self.tool, param_dict=incoming)
@@ -437,6 +441,10 @@
# Create generated output children and primary datasets and add to param_dict
collected_datasets = {'children':self.tool.collect_child_datasets(out_data),'primary':self.tool.collect_primary_datasets(out_data)}
param_dict.update({'__collected_datasets__':collected_datasets})
+ # Certain tools require tasks to be completed after job execution
+ # ( this used to be performed in the "exec_after_process" hook, but hooks are deprecated ).
+ if self.tool.tool_type is not None:
+ self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict )
# Call 'exec_after_process' hook
self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
out_data=out_data, param_dict=param_dict,
diff -r 960820cccaaa -r 64c0734ff262 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Tue Oct 07 11:58:32 2008 -0400
+++ b/lib/galaxy/tools/__init__.py Tue Oct 07 15:21:46 2008 -0400
@@ -225,8 +225,22 @@
if not self.version:
# For backward compatibility, some tools may not have versions yet.
self.version = "1.0.0"
- # Command line (template). Optional for tools that do not invoke a
- # local program
+ # Type of tool
+ self.tool_type = root.get( "tool_type", None )
+ if self.tool_type is not None:
+ # data_source tool
+ if self.tool_type == "data_source":
+ self.param_trans_dict = {}
+ req_param_trans = root.find( "request_param_translation" )
+ if req_param_trans is not None:
+ for req_param in req_param_trans.findall( "request_param" ):
+ # req_param tags must look like <request_param galaxy_name="dbkey" remote_name="GENOME" missing="" />
+ trans_list = []
+ remote_name = req_param.get( "remote_name" )
+ trans_list.append( req_param.get( "galaxy_name" ) )
+ trans_list.append( req_param.get( "missing" ) )
+ self.param_trans_dict[ remote_name ] = trans_list
+ # Command line (template). Optional for tools that do not invoke a local program
command = root.find("command")
if command is not None and command.text is not None:
self.command = command.text.lstrip() # get rid of leading whitespace
@@ -1115,7 +1129,56 @@
except Exception, e:
e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ) )
raise
-
+
+ def exec_before_job( self, app, inp_data, out_data, param_dict={} ):
+ if self.tool_type == 'data_source':
+ # List for converting UCSC to Galaxy exts, if not in following dictionary, use provided datatype
+ data_type_to_ext = { 'wigdata':'wig', 'tab':'interval', 'hyperlinks':'html', 'sequence':'fasta' }
+ dbkey = param_dict.get( 'dbkey' )
+ organism = param_dict.get( 'organism' )
+ table = param_dict.get( 'table' )
+ description = param_dict.get( 'description' )
+ if description == 'range':
+ description = param_dict.get( 'position', '' )
+ if not description:
+ description = 'unknown position'
+ data_type = param_dict.get( 'data_type' )
+ items = out_data.items()
+ for name, data in items:
+ if organism and table and description:
+ data.name = '%s on %s: %s (%s)' % ( data.name, organism, table, description )
+ data.dbkey = dbkey
+ ext = data_type
+ try:
+ ext = data_type_to_ext[ data_type ]
+ except:
+ pass
+ if ext not in app.datatypes_registry.datatypes_by_extension:
+ ext = 'interval'
+ data = app.datatypes_registry.change_datatype( data, ext )
+ # store external data source's request parameters temporarily in output file
+ out = open( data.file_name, 'w' )
+ for key, value in param_dict.items():
+ print >> out, '%s\t%s' % ( key, value )
+ out.close()
+ out_data[ name ] = data
+ return out_data
+
+ def exec_after_process( self, app, inp_data, out_data, param_dict ):
+ # TODO: for data_source tools at least, this code can probably be handled more optimally by adding a new
+ # tag set in the tool config.
+ if self.tool_type == 'data_source':
+ name, data = out_data.items()[0]
+ if data.state == data.states.OK:
+ data.info = data.name
+ if not isinstance( data.datatype, datatypes.interval.Bed ) and isinstance( data.datatype, datatypes.interval.Interval ):
+ data.set_meta()
+ if data.missing_meta():
+ data = app.datatypes_registry.change_datatype( data, 'tabular' )
+ data.set_peek()
+ data.set_size()
+ data.flush()
+
def collect_associated_files( self, output ):
for name, outdata in output.items():
temp_file_path = os.path.join( self.app.config.new_file_path, "dataset_%s_files" % ( outdata.id ) )
diff -r 960820cccaaa -r 64c0734ff262 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Tue Oct 07 11:58:32 2008 -0400
+++ b/lib/galaxy/util/__init__.py Tue Oct 07 15:21:46 2008 -0400
@@ -141,13 +141,30 @@
# different parameters can be sanitized in different ways.
NEVER_SANITIZE = ['file_data', 'url_paste', 'URL']
- def __init__(self, params, safe=True, sanitize=True):
+ def __init__( self, params, safe=True, sanitize=True, tool_type=None, param_trans_dict={} ):
if safe:
for key, value in params.items():
+ # Check to see if we should translate certain parameter names. For example,
+ # in data_source tools, the external data source application may send back
+ # parameter names like GENOME which is translated to dbkey in Galaxy.
+ # param_trans_dict looks like { "GENOME" : [ "dbkey", "?" ] }
+ new_key = key
+ new_value = value
+ if tool_type == 'data_source':
+ if key in param_trans_dict:
+ new_key = param_trans_dict[ key ][0]
+ if not value:
+ new_value = param_trans_dict[ key ][1]
if key not in self.NEVER_SANITIZE and sanitize:
- self.__dict__[key] = sanitize_param(value)
+ self.__dict__[ new_key ] = sanitize_param( new_value )
else:
- self.__dict__[key] = value
+ self.__dict__[ new_key ] = new_value
+ for key, value in param_trans_dict.items():
+ # Make sure that all translated values used in Galaxy are added to the params
+ galaxy_name = param_trans_dict[ key ][0]
+ if galaxy_name not in self.__dict__:
+ # This will set the galaxy_name to the "missing" value
+ self.__dict__[ galaxy_name ] = param_trans_dict[ key ][1]
else:
self.__dict__.update(params)
diff -r 960820cccaaa -r 64c0734ff262 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Oct 07 11:58:32 2008 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Oct 07 15:21:46 2008 -0400
@@ -39,7 +39,11 @@
log.error( "index called with tool id '%s' but no such tool exists", tool_id )
trans.log_event( "Tool id '%s' does not exist" % tool_id )
return "Tool '%s' does not exist, kwd=%s " % (tool_id, kwd)
- params = util.Params(kwd, sanitize = tool.options.sanitize)
+ try:
+ param_trans_dict = tool.param_trans_dict
+ except:
+ param_trans_dict = {}
+ params = util.Params( kwd, sanitize=tool.options.sanitize, tool_type=tool.tool_type, param_trans_dict=param_trans_dict )
history = trans.get_history()
trans.ensure_valid_galaxy_session()
template, vars = tool.handle_input( trans, params.__dict__ )
diff -r 960820cccaaa -r 64c0734ff262 tool_conf.xml.sample
--- a/tool_conf.xml.sample Tue Oct 07 11:58:32 2008 -0400
+++ b/tool_conf.xml.sample Tue Oct 07 15:21:46 2008 -0400
@@ -2,6 +2,7 @@
<toolbox>
<section name="Get Data" id="getext">
<tool file="data_source/upload.xml"/>
+ <tool file="data_source/access_libraries.xml" />
<tool file="data_source/ucsc_tablebrowser.xml" />
<tool file="data_source/ucsc_tablebrowser_test.xml" />
<tool file="data_source/ucsc_tablebrowser_archaea.xml" />
@@ -123,7 +124,6 @@
<tool file="visualization/GMAJ.xml" />
<tool file="visualization/LAJ.xml" />
<tool file="visualization/build_ucsc_custom_track.xml" />
- <tool file="visualization/build_gbrowse_custom_track.xml" />
</section>
<section name="Regional Variation" id="regVar">
<tool file="regVariation/windowSplitter.xml" />
@@ -156,8 +156,8 @@
<tool file="taxonomy/poisson2test.xml" />
</section>
<section name="Solexa tools" id="solexa_tools">
- <tool file="solexa/fastq_statistics.xml" />
- <tool file="solexa/lastz_wrapper.xml" />
+ <tool file="sr_mapping/fastq_statistics.xml" />
+ <tool file="sr_mapping/lastz_wrapper.xml" />
</section>
<!--
TODO: uncomment the following EMBOSS section whenever
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/biomart.py
--- a/tools/data_source/biomart.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-#Retreives data from BIOMART and stores in a file. Biomart parameters are provided in the input/output file.
-#guruprasad Ananda
-
-import urllib, sys, os, gzip, tempfile, shutil
-from galaxy import eggs
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def __main__():
- filename = sys.argv[1]
- params = {}
- for line in open(filename, 'r'):
- try:
- line = line.strip()
- fields = line.split('\t')
- params[fields[0]] = fields[1]
- except:
- continue
-
- URL = params.get( 'URL', None )
- if not URL:
- open( filename, 'w' ).write( "" )
- stop_err( 'Datasource has not sent back a URL parameter.' )
- URL = URL + '&_export=1&GALAXY_URL=0'
- CHUNK_SIZE = 2**20 # 1Mb
- MAX_SIZE = CHUNK_SIZE * 100
- try:
- page = urllib.urlopen(URL)
- except Exception, exc:
- stop_err('Problems connecting to %s (%s)' % (URL, exc) )
-
- fp = open(filename, 'w')
- size = 0
- max_size_exceeded = False
- while 1:
- chunk = page.read(CHUNK_SIZE)
- if not chunk:
- break
- size += len(chunk)
- if size > MAX_SIZE:
- max_size_exceeded = True
- break
- fp.write(chunk)
- fp.close()
-
- if max_size_exceeded:
- print 'Maximum data size of 100 MB exceeded, incomplete data retrieval.'
-
-if __name__ == "__main__":
- __main__()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/biomart.xml
--- a/tools/data_source/biomart.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/biomart.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,24 +1,24 @@
<?xml version="1.0"?>
-<tool name="BioMart" id="biomart">
+<tool name="BioMart" id="biomart" tool_type="data_source">
<description>Central server</description>
- <command interpreter="python">
- biomart.py
- $output
- </command>
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://www.biomart.org/biomart/martview" check_values="false" method="get" target="_top">
<display>go to BioMart Central $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
</inputs>
-
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+ <request_param galaxy_name="organism" remote_name="organism" missing="" />
+ <request_param galaxy_name="table" remote_name="table" missing="" />
+ <request_param galaxy_name="description" remote_name="description" missing="" />
+ <request_param galaxy_name="name" remote_name="name" missing="Biomart query" />
+ <request_param galaxy_name="info" remote_name="info" missing="" />
+ <request_param galaxy_name="data_type" remote_name="type" missing="txt" />
+ </request_param_translation>
<uihints minwidth="800"/>
-
- <code file="biomart_filter.py"/>
-
<outputs>
<data name="output" format="txt" />
</outputs>
-
<options sanitize="False" refresh="True"/>
-
</tool>
-
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/biomart_filter.py
--- a/tools/data_source/biomart_filter.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,87 +0,0 @@
-# Greg Von Kuster
-import urllib
-from galaxy import eggs
-from galaxy.datatypes import sniff
-from galaxy import datatypes, config
-import tempfile, shutil
-
-def exec_before_job( app, inp_data, out_data, param_dict, tool=None):
- """Sets the name of the data"""
- data_name = param_dict.get( 'name', 'Biomart query' )
- data_type = param_dict.get( 'type', 'txt' )
- name, data = out_data.items()[0]
- if data_type == 'txt':
- data_type = sniff.guess_ext( data.file_name, sniff_order=app.datatypes_registry.sniff_order )
- data = app.datatypes_registry.change_datatype(data, data_type)
- data.name = data_name
- #store BIOMART parameters temporarily in output file
- out = open(data.file_name,'w')
- for key, value in param_dict.items():
- print >> out, "%s\t%s" % (key,value)
- out.close()
- out_data[name] = data
-
-
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
- name, data = out_data.items()[0]
- if not isinstance(data.datatype, datatypes.interval.Bed) and isinstance(data.datatype, datatypes.interval.Interval):
- #Set meta data, format file to be valid interval type
- data.set_meta(first_line_is_header=True)
- #check for missing meta data, if all there, comment first line and process file
- if not data.missing_meta():
- line_ctr = -1
- temp = tempfile.NamedTemporaryFile('w')
- temp_filename = temp.name
- temp.close()
- temp = open(temp_filename,'w')
- chromCol = int(data.metadata.chromCol) - 1
- startCol = int(data.metadata.startCol) - 1
- strandCol = int(data.metadata.strandCol) - 1
-
- for line in open(data.file_name, 'r'):
- line_ctr += 1
- #First line is a non-commented header line, lets comment it out here
- if line_ctr == 0:
- temp.write("#%s" % line)
- continue
- fields = line.strip().split('\t')
- #If chrom col is an int, make it chrInt
- try:
- int(fields[chromCol])
- fields[chromCol] = "chr%s" % fields[chromCol]
- except:
- try:
- if fields[chromCol].upper()== "X" or fields[chromCol].upper()== "Y":
- fields[chromCol] = "chr%s" % fields[chromCol].upper()
- except:
- pass
- #change to BED coordinate system
- try:
- fields[startCol] = str(int(fields[startCol]) - 1)
- except:
- pass
- #set strand to +/-, instead of +1/-1
- try:
- if strandCol > 0:
- if int(fields[strandCol]) > 0:
- fields[strandCol] = "+"
- else:
- fields[strandCol] = "-"
- except:
- pass
- temp.write("%s\n" % '\t'.join(fields))
- temp.close()
- shutil.move(temp_filename,data.file_name)
- else:
- data_type = sniff.guess_ext(data.file_name)
- data = app.datatypes_registry.change_datatype(data, data_type)
- if data.missing_meta():
- data.set_meta()
- else:
- data_type = sniff.guess_ext(data.file_name)
- data = app.datatypes_registry.change_datatype(data, data_type)
- if data.missing_meta():
- data.set_meta()
- data.set_peek()
- data.set_size()
- data.flush()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/biomart_test.xml
--- a/tools/data_source/biomart_test.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/biomart_test.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,27 +1,24 @@
<?xml version="1.0"?>
-<tool name="BioMart" id="biomart">
-
+<tool name="BioMart" id="biomart" tool_type="data_source">
<description>Test server</description>
-
- <command interpreter="python">
- biomart.py
- $output
- </command>
-
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://test.biomart.org/biomart/martview" check_values="false" method="get" target="_top">
<display>go to BioMart Central $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
</inputs>
-
- <uihints minwidth="800"/>
-
- <code file="biomart_filter.py"/>
-
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+ <request_param galaxy_name="organism" remote_name="organism" missing="" />
+ <request_param galaxy_name="table" remote_name="table" missing="" />
+ <request_param galaxy_name="description" remote_name="description" missing="" />
+ <request_param galaxy_name="name" remote_name="name" missing="Biomart test query" />
+ <request_param galaxy_name="info" remote_name="info" missing="" />
+ <request_param galaxy_name="data_type" remote_name="type" missing="txt" />
+ </request_param_translation>
+ <uihints minwidth="800"/>
<outputs>
<data name="output" format="txt" />
</outputs>
-
<options sanitize="False" refresh="True"/>
-
</tool>
-
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/data_source.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/data_source.py Tue Oct 07 15:21:46 2008 -0400
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#Retrieves data from UCSC and stores it in a file. UCSC parameters are provided in the input/output file.
+import urllib, sys, os, gzip, tempfile, shutil
+from galaxy import eggs
+from galaxy.datatypes import data
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def stop_err( msg ):
+ sys.stderr.write( msg )
+ sys.exit()
+
+def check_gzip( filename ):
+ temp = open( filename, "U" )
+ magic_check = temp.read( 2 )
+ temp.close()
+ if magic_check != data.gzip_magic:
+ return False
+ return True
+
+def __main__():
+ filename = sys.argv[1]
+ params = {}
+ for line in open( filename, 'r' ):
+ try:
+ line = line.strip()
+ fields = line.split( '\t' )
+ params[ fields[0] ] = fields[1]
+ except:
+ continue
+ URL = params.get( 'URL', None )
+ if not URL:
+ open( filename, 'w' ).write( "" )
+ stop_err( 'The remote data source application has not sent back a URL parameter in the request.' )
+ out = open( filename, 'w' )
+ CHUNK_SIZE = 2**20 # 1Mb
+ try:
+ page = urllib.urlopen( URL, urllib.urlencode( params ) )
+ except:
+ stop_err( 'It appears that the remote data source application is currently off line. Please try again later.' )
+ while 1:
+ chunk = page.read( CHUNK_SIZE )
+ if not chunk:
+ break
+ out.write( chunk )
+ out.close()
+ if check_gzip( filename ):
+ fd, uncompressed = tempfile.mkstemp()
+ gzipped_file = gzip.GzipFile( filename )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ gzipped_file.close()
+ stop_err( 'Problem uncompressing gzipped data, please try retrieving the data uncompressed.' )
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ # Replace the gzipped file with the uncompressed file
+ shutil.move( uncompressed, filename )
+
+if __name__ == "__main__": __main__()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/epigraph.py
--- a/tools/data_source/epigraph.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-#Retreives data from EpiGRAPH and stores in a file. EpiGRAPH request parameters are provided in the input/output file.
-import urllib, sys, os, gzip, tempfile, shutil
-from galaxy import eggs
-from galaxy.datatypes import data
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def check_gzip( filename ):
- temp = open( filename, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != data.gzip_magic:
- return False
- return True
-
-def __main__():
- filename = sys.argv[1]
- params = {}
- for line in open( filename, 'r' ):
- try:
- line = line.strip()
- fields = line.split( '\t' )
- params[ fields[0] ] = fields[1]
- except:
- continue
- URL = params.get( 'URL', None )
- if not URL:
- open( filename, 'w' ).write( "" )
- stop_err( 'EpiGRAPH has not sent back a URL parameter.' )
- out = open( filename, 'w' )
- CHUNK_SIZE = 2**20 # 1Mb
- try:
- page = urllib.urlopen( URL, urllib.urlencode( params ) )
- except:
- stop_err( 'It appears that the EpiGRAPH server is currently off-line. Please try again later.' )
- while 1:
- chunk = page.read( CHUNK_SIZE )
- if not chunk:
- break
- out.write( chunk )
- out.close()
- if check_gzip( filename ):
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( filename )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- gzipped_file.close()
- stop_err( 'Problem uncompressing gzipped data, please try retrieving the data uncompressed.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the uncompressed file
- shutil.move( uncompressed, filename )
-
-if __name__ == "__main__": __main__()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/epigraph_code.py
--- a/tools/data_source/epigraph_code.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,41 +0,0 @@
-#Code for direct connection to EpiGRAPH
-from galaxy.datatypes import sniff
-import urllib
-
-def exec_before_job( app, inp_data, out_data, param_dict, tool=None ):
- """
- EpiGRAPH sends data to Galaxy by passing the following parameters in the request:
- 1. URL - the url to which Galaxy should post a request to retrieve the data
- 2. GENOME - the name of the UCSC genome assembly (e.g. hg18), dbkey in Galaxy
- 3. NAME - data.name in Galaxy
- 4. INFO - data.info in Galaxy
- """
- items = out_data.items()
- for name, data in items:
- NAME = urllib.unquote( param_dict.get( 'NAME', None ) )
- if NAME is not None:
- data.name = NAME
- INFO = urllib.unquote( param_dict.get( 'INFO', None ) )
- if INFO is not None:
- data.info = INFO
- GENOME = urllib.unquote( param_dict.get( 'GENOME', None ) )
- if GENOME is not None:
- data.dbkey = GENOME
- else:
- data.dbkey = '?'
- # Store EpiGRAPH request parameters temporarily in output file
- out = open( data.file_name, 'w' )
- for key, value in param_dict.items():
- print >> out, "%s\t%s" % ( key, value )
- out.close()
- out_data[ name ] = data
-
-def exec_after_process( app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None ):
- """Verifies the datatype after the run"""
- name, data = out_data.items()[0]
- if data.extension == 'txt':
- data_type = sniff.guess_ext( data.file_name, sniff_order=app.datatypes_registry.sniff_order )
- data = app.datatypes_registry.change_datatype( data, data_type )
- data.set_peek()
- data.set_size()
- data.flush()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/epigraph_import.xml
--- a/tools/data_source/epigraph_import.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/epigraph_import.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,15 +1,24 @@
<?xml version="1.0"?>
-<tool name="EpiGRAPH" id="epigraph_import">
- <description> server</description>
- <command interpreter="python">epigraph.py $output</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <uihints minwidth="800"/>
- <code file="epigraph_code.py"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
+<tool name="EpiGRAPH" id="epigraph_import" tool_type="data_source">
+ <description> server</description>
+ <command interpreter="python">data_source.py $output</command>
+ <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
+ <display>go to EpiGRAPH server $GALAXY_URL</display>
+ <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
+ </inputs>
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
+ <request_param galaxy_name="organism" remote_name="organism" missing="" />
+ <request_param galaxy_name="table" remote_name="table" missing="" />
+ <request_param galaxy_name="description" remote_name="description" missing="" />
+ <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
+ <request_param galaxy_name="info" remote_name="INFO" missing="" />
+ <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
+ </request_param_translation>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="txt" />
+ </outputs>
+ <options sanitize="False" refresh="True"/>
</tool>
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/flymine.xml
--- a/tools/data_source/flymine.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/flymine.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,13 +1,22 @@
<?xml version="1.0"?>
-<tool name="Flymine" id="flymine">
+<tool name="Flymine" id="flymine" tool_type="data_source">
<description>server</description>
- <command interpreter="python">intermine.py $output</command>
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get" target="_top">
<display>go to Flymine server $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
</inputs>
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+ <request_param galaxy_name="organism" remote_name="organism" missing="" />
+ <request_param galaxy_name="table" remote_name="table" missing="" />
+ <request_param galaxy_name="description" remote_name="description" missing="" />
+ <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
+ <request_param galaxy_name="info" remote_name="info" missing="" />
+ <request_param galaxy_name="data_type" remote_name="data_type" missing="interval" />
+ </request_param_translation>
<uihints minwidth="800"/>
- <code file="flymine_filter_code.py"/>
<outputs>
<data name="output" format="txt" />
</outputs>
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/flymine_filter_code.py
--- a/tools/data_source/flymine_filter_code.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,31 +0,0 @@
-# Code for direct connection to flymine
-from galaxy.datatypes import sniff
-import urllib
-
-import logging
-log = logging.getLogger( __name__ )
-
-def exec_before_job( app, inp_data, out_data, param_dict, tool=None ):
- """Sets the attributes of the data"""
- items = out_data.items()
- for name, data in items:
- data.dbkey = param_dict.get( 'dbkey', '?' )
- # Store flymine parameters temporarily in output file
- out = open( data.file_name, 'w' )
- for key, value in param_dict.items():
- out.write( "%s\t%s\n" % ( key, value ) )
- out.close()
- out_data[ name ] = data
-
-def exec_after_process( app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None ):
- """Verifies the data after the run"""
- name, data = out_data.items()[0]
- if data.state == data.states.OK:
- data.info = data.name
- if data.extension == 'txt':
- data_type = sniff.guess_ext( data.file_name, sniff_order=app.datatypes_registry.sniff_order )
- data = app.datatypes_registry.change_datatype( data, data_type )
- data.set_peek()
- data.set_size()
- data.flush()
-
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/intermine.py
--- a/tools/data_source/intermine.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-#Retreives data from intermine and stores in a file. Intermine parameters are provided in the input/output file.
-import urllib, sys, os, gzip, tempfile, shutil
-from galaxy import eggs
-from galaxy.datatypes import data
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def __main__():
- filename = sys.argv[1]
- params = {}
-
- for line in open( filename, 'r' ):
- try:
- line = line.strip()
- fields = line.split( '\t' )
- params[ fields[0] ] = fields[1]
- except:
- continue
-
- URL = params.get( 'URL', None )
- if not URL:
- open( filename, 'w' ).write( "" )
- stop_err( 'Datasource has not sent back a URL parameter.' )
-
- CHUNK_SIZE = 2**20 # 1Mb
- try:
- page = urllib.urlopen( URL )
- except Exception, exc:
- raise Exception( 'Problems connecting to %s (%s)' % ( URL, exc ) )
- sys.exit( 1 )
-
- fp = open( filename, 'wb' )
- while 1:
- chunk = page.read( CHUNK_SIZE )
- if not chunk:
- break
- fp.write( chunk )
- fp.close()
-
-if __name__ == "__main__": __main__()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/ucsc_tablebrowser.py
--- a/tools/data_source/ucsc_tablebrowser.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-#Retreives data from UCSC and stores in a file. UCSC parameters are provided in the input/output file.
-import urllib, sys, os, gzip, tempfile, shutil
-from galaxy import eggs
-from galaxy.datatypes import data
-
-assert sys.version_info[:2] >= ( 2, 4 )
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def check_gzip( filename ):
- temp = open( filename, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != data.gzip_magic:
- return False
- return True
-
-def __main__():
- filename = sys.argv[1]
- params = {}
-
- for line in open(filename, 'r'):
- try:
- line = line.strip()
- fields = line.split('\t')
- params[fields[0]] = fields[1]
- except:
- continue
-
- URL = params.get( 'URL', None )
- if not URL:
- open( filename, 'w' ).write( "" )
- #raise Exception('Datasource has not sent back a URL parameter')
- stop_err( 'Datasource has not sent back a URL parameter.' )
- out = open( filename, 'w' )
- CHUNK_SIZE = 2**20 # 1Mb
- try:
- page = urllib.urlopen( URL, urllib.urlencode( params ) )
- except:
- stop_err( 'It appears that the UCSC Table Browser is currently offline. Please try again later.' )
-
- while 1:
- chunk = page.read( CHUNK_SIZE )
- if not chunk:
- break
- out.write( chunk )
- out.close()
- if check_gzip( filename ):
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( filename )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- gzipped_file.close()
- stop_err( 'Problem decompressing gzipped data, please try retrieving the data uncompressed.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file
- shutil.move( uncompressed, filename )
-
-if __name__ == "__main__": __main__()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/ucsc_tablebrowser.xml
--- a/tools/data_source/ucsc_tablebrowser.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/ucsc_tablebrowser.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,10 +1,7 @@
<?xml version="1.0"?>
-<tool name="UCSC Main" id="ucsc_table_direct1">
-
+<tool name="UCSC Main" id="ucsc_table_direct1" tool_type="data_source">
<description>table browser</description>
-
- <command interpreter="python">ucsc_tablebrowser.py $output</command>
-
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://genome.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
<display>go to UCSC Table Browser $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
@@ -13,15 +10,17 @@
<param name="hgta_compressType" type="hidden" value="none" />
<param name="hgta_outputType" type="hidden" value="bed" />
</inputs>
-
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+ <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+ <request_param galaxy_name="table" remote_name="hgta_track" missing="unknown table" />
+ <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+ <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="interval" />
+ </request_param_translation>
<uihints minwidth="800"/>
-
- <code file="ucsc_tablebrowser_code.py"/>
-
<outputs>
<data name="output" format="bed" />
</outputs>
<options sanitize="False" refresh="True"/>
-
</tool>
-
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/ucsc_tablebrowser_archaea.xml
--- a/tools/data_source/ucsc_tablebrowser_archaea.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/ucsc_tablebrowser_archaea.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,10 +1,7 @@
<?xml version="1.0"?>
-<tool name="UCSC Archaea" id="ucsc_table_direct_archaea1">
-
+<tool name="UCSC Archaea" id="ucsc_table_direct_archaea1" tool_type="data_source">
<description>table browser</description>
-
- <command interpreter="python">ucsc_tablebrowser.py $output</command>
-
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://archaea.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
<display>go to UCSC Table Browser $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
@@ -13,15 +10,17 @@
<param name="hgta_compressType" type="hidden" value="none" />
<param name="hgta_outputType" type="hidden" value="bed" />
</inputs>
-
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+ <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+ <request_param galaxy_name="table" remote_name="hgta_track" missing="" />
+ <request_param galaxy_name="description" remote_name="hgta_regionType" missing="" />
+ <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="interval" />
+ </request_param_translation>
<uihints minwidth="800"/>
-
- <code file="ucsc_tablebrowser_code.py"/>
-
<outputs>
<data name="output" format="bed" />
</outputs>
<options sanitize="False" refresh="True"/>
-
</tool>
-
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/ucsc_tablebrowser_code.py
--- a/tools/data_source/ucsc_tablebrowser_code.py Tue Oct 07 11:58:32 2008 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,51 +0,0 @@
-#Code for direct connection to UCSC
-from galaxy import datatypes
-
-def exec_before_job( app, inp_data, out_data, param_dict, tool=None):
- """Sets the name of the data"""
- outputType = param_dict.get( 'hgta_outputType', "interval" ).lower() #assume all data is interval, we will fix later if not the case
- #list for converting ucsc to galaxy exts, if not in following dictionary, use provided datatype
- outputType_to_ext = {'wigdata':'wig','tab':'interval','hyperlinks':'html','sequence':'fasta'}
- items = out_data.items()
- description = param_dict.get('hgta_regionType',"")
- organism = param_dict.get('org',"unkown species")
- table = param_dict.get('hgta_track',"")
- if description == 'range':
- try:
- description = param_dict.get('position',"")
- except:
- description = "unkown position"
- for name, data in items:
- data.name = "%s on %s: %s (%s)" % (data.name, organism, table, description)
- data.dbkey = param_dict.get('db', '?')
- ext = outputType
- try:
- ext = outputType_to_ext[outputType]
- except:
- pass
- if ext not in app.datatypes_registry.datatypes_by_extension:
- ext = 'interval'
- data = app.datatypes_registry.change_datatype(data, ext)
-
- #store ucsc parameters temporarily in output file
- out = open(data.file_name,'w')
- for key, value in param_dict.items():
- print >> out, "%s\t%s" % (key,value)
- out.close()
-
- out_data[name] = data
-
-def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
- """Verifies the datatype after the run"""
-
- name, data = out_data.items()[0]
- if data.state == data.states.OK:
- data.info = data.name
-
- if not isinstance(data.datatype, datatypes.interval.Bed) and isinstance(data.datatype, datatypes.interval.Interval):
- data.set_meta()
- if data.missing_meta():
- data = app.datatypes_registry.change_datatype(data, 'tabular')
- data.set_peek()
- data.set_size()
- data.flush()
diff -r 960820cccaaa -r 64c0734ff262 tools/data_source/ucsc_tablebrowser_test.xml
--- a/tools/data_source/ucsc_tablebrowser_test.xml Tue Oct 07 11:58:32 2008 -0400
+++ b/tools/data_source/ucsc_tablebrowser_test.xml Tue Oct 07 15:21:46 2008 -0400
@@ -1,10 +1,7 @@
<?xml version="1.0"?>
-<tool name="UCSC Test" id="ucsc_table_direct_test1">
-
+<tool name="UCSC Test" id="ucsc_table_direct_test1" tool_type="data_source">
<description>table browser</description>
-
- <command interpreter="python">ucsc_tablebrowser.py $output</command>
-
+ <command interpreter="python">data_source.py $output</command>
<inputs action="http://genome-test.cse.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
<display>go to UCSC Table Browser $GALAXY_URL</display>
<param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
@@ -13,15 +10,17 @@
<param name="hgta_compressType" type="hidden" value="none" />
<param name="hgta_outputType" type="hidden" value="bed" />
</inputs>
-
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="" />
+ <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+ <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+ <request_param galaxy_name="table" remote_name="hgta_track" missing="" />
+ <request_param galaxy_name="description" remote_name="hgta_regionType" missing="" />
+ <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="interval" />
+ </request_param_translation>
<uihints minwidth="800"/>
-
- <code file="ucsc_tablebrowser_code.py"/>
-
<outputs>
<data name="output" format="bed" />
</outputs>
<options sanitize="False" refresh="True"/>
-
</tool>
-
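To make the request-param translation concrete, here is a small standalone sketch (not from the commit) of the mapping the reworked util.Params performs; the dict shape { remote_name : [ galaxy_name, missing ] } is taken from the lib/galaxy/util/__init__.py hunk above, and the sample values are invented:

    # Standalone sketch of the translation performed by util.Params above.
    # param_trans_dict maps a remote parameter name to [ galaxy_name, missing ].
    param_trans_dict = { 'GENOME' : [ 'dbkey', '?' ],
                         'NAME'   : [ 'name', 'EpiGRAPH query' ] }

    def translate( params, param_trans_dict ):
        translated = {}
        for key, value in params.items():
            if key in param_trans_dict:
                galaxy_name, missing = param_trans_dict[ key ]
                translated[ galaxy_name ] = value or missing
            else:
                translated[ key ] = value
        # Every translated Galaxy name must be present, falling back to "missing".
        for galaxy_name, missing in param_trans_dict.values():
            translated.setdefault( galaxy_name, missing )
        return translated

    print translate( { 'GENOME' : 'hg18', 'URL' : 'http://example.org/fetch' }, param_trans_dict )
    # {'dbkey': 'hg18', 'URL': 'http://example.org/fetch', 'name': 'EpiGRAPH query'}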
10 Oct '08
Revision: 2774
Author: nate
Date: 2008-10-10 14:27:43 -0400 (Fri, 10 Oct 2008)
Log Message:
-----------
Changed a couple constants in sputnik, as per Guru.
Modified Paths:
--------------
dependencies/sputnik/sputnik.c
Modified: dependencies/sputnik/sputnik.c
===================================================================
--- dependencies/sputnik/sputnik.c 2008-09-24 14:26:07 UTC (rev 2773)
+++ dependencies/sputnik/sputnik.c 2008-10-10 18:27:43 UTC (rev 2774)
@@ -53,9 +53,9 @@
/* this is the point score for each exact match */
#define EXACT_MATCH_POINTS 1
/* this is the point score for a mismatch, insertion or deletion */
-#define ERROR_MATCH_POINTS -6
+#define ERROR_MATCH_POINTS -100
/* this is the minimum score required to be considered a match */
-#define MATCH_MIN_SCORE 8
+#define MATCH_MIN_SCORE 4
/* this is the low score at which we stop trying */
#define MATCH_FAIL_SCORE -1
/* this is the max recursion depth we try to recover errors */
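A worked reading of what the new values do, assuming the scoring is exact_matches * EXACT_MATCH_POINTS plus errors * ERROR_MATCH_POINTS (an interpretation, not from the commit): under the old constants a repeat with one error could still reach MATCH_MIN_SCORE 8 with 14 exact matches (14 - 6 = 8); with ERROR_MATCH_POINTS at -100 a single error sinks any realistic candidate, while dropping MATCH_MIN_SCORE to 4 lets shorter perfect repeats qualify.

    # Worked reading of the constant change (interpretation, not from the commit):
    # score = exact_matches * EXACT_MATCH_POINTS + errors * ERROR_MATCH_POINTS
    def score( exact, errors, error_points ):
        return exact * 1 + errors * error_points

    print score( 14, 1, -6 )    # 8  : old constants, one error could still pass
    print score( 14, 1, -100 )  # -86: new constants, a single error disqualifies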
I may be missing something, but right now, and without any changes to
Galaxy, there's no reason why a new tool should not wrap an existing
(or new) command line executable program that uses web services to do
something interesting!
I have an rgenetics tool that uses the Haploview java executable in
command-line mode to prepare image files of LD patterns in the HapMap
populations, together with an in-house sample, over a region, and then
stitches them into a single image so they can be visually compared.
Galaxy doesn't know or care whether the tool executable uses web
services: the job runner sets up the call and then deals with the
resulting tool outputs correctly. As long as the cluster node executing
the tool has internet access, it can interact with the web services
transparently. As Greg says, the tool will be fragile to changes in the
web service call syntax, but that's always a problem in the long term.
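To make that concrete, here is a minimal sketch of the kind of standalone script such a tool could call: Galaxy just hands it an output path on the command line, and the script is free to talk to a web service itself. The script name, URL, and fetch details are all illustrative, not part of any Galaxy API:

    #!/usr/bin/env python
    # fetch_remote.py (hypothetical): a command-line tool that happens to
    # use a web service. Galaxy only sees "fetch_remote.py <output_path>";
    # the HTTP call is the tool's own business.
    import sys
    import urllib.request

    SERVICE_URL = "http://example.org/service?query=demo"  # placeholder endpoint

    def main( output_path ):
        # The executing cluster node needs internet access for this call.
        data = urllib.request.urlopen( SERVICE_URL ).read()
        # Write results where Galaxy told us to; the job runner does the rest.
        with open( output_path, "wb" ) as out:
            out.write( data )

    if __name__ == "__main__":
        main( sys.argv[1] )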
I agree with Greg that bringing web services interfaces *directly*
into the existing Galaxy framework does not appear to be a high
priority at this time, and I'm not even sure it makes much sense in
the long term, especially since we can let web service interaction
live out in tool space, where it already works. Is there a specific
use case anyone has in mind where the web services interface really
needs to be brought inside the framework itself?
On Tue, Sep 30, 2008 at 2:00 AM, <galaxy-dev-request(a)bx.psu.edu> wrote:
>
> Today's Topics:
>
> 1. Re: Galaxy with web services (Greg Von Kuster)
>
>
> ----------------------------------------------------------------------
>
> Message: 1
> Date: Mon, 29 Sep 2008 09:52:39 -0400
> From: Greg Von Kuster <ghv2(a)psu.edu>
> Subject: Re: [galaxy-dev] Galaxy with web services
> To: Praveen Agrawal <praveen(a)scfbio-iitd.res.in>
> Cc: galaxy-dev(a)bx.psu.edu
> Message-ID: <48E0DDA7.1090200(a)psu.edu>
> Content-Type: text/plain; charset=ISO-8859-1; format=flowed
>
> Hello Praveen,
>
> Currently web-based interaction is only used for external data sources
> (biomart, UCSC table browser, most other things under "Get Data"). For
> the other tools, we generate a command line based on the user input,
> which Galaxy then runs. The motivation for this is control and trust.
> One of our major goals with Galaxy is to ensure reproducibility, and
> relying on web services which can change their interfaces or behavior
> makes this very challenging. We are certainly interested in supporting
> web services in the Galaxy framework, but implementation is not yet
> under way. Several of the methods currently in ~/tools/__init__.py (
> among others ) will need to evolve and grow in order to support a web
> services interface for remote tools. We'll certainly keep you informed
> as we begin to support this, but it may be a while...
>
> Greg Von Kuster
> Galaxy Development Team
>
>
> Praveen Agrawal wrote:
>> Hi,
>> I am trying to use Galaxy at my site and can't figure out Galaxy with web
>> services. I understand that it's possible to integrate a data source using
>> WSDL/SOAP. Is the same possible for a remote tool as well?
--
python -c "foo = map(None,'moc.liamg(a)surazal.ssor'); foo.reverse();
print ''.join(foo)"
06 Oct '08
details: http://www.bx.psu.edu/hg/galaxy/rev/45033114f82d
changeset: 1543:45033114f82d
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Thu Oct 02 16:37:32 2008 -0400
description:
Add the ability to undelete deleted datasets from a history.
A new selection under History Options allows users to select a history view containing deleted (and undeletable) datasets.
Known Issues:
1. When adding a new dataset to a history (tool execution), the history is always refreshed to show only non-deleted datasets.
2. When doing an ajax delete of a dataset while viewing deleted datasets, the newly deleted dataset is still removed from the current view of the history (instead of appearing with the deleted message); a refresh will bring it back.
8 file(s) affected in this change:
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/dataset.py
lib/galaxy/web/controllers/root.py
templates/history/options.mako
templates/root/history.mako
templates/root/history_common.mako
diffs (190 lines):
diff -r 20591fa0d05d -r 45033114f82d lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Oct 02 15:51:17 2008 -0400
+++ b/lib/galaxy/model/__init__.py Thu Oct 02 16:37:32 2008 -0400
@@ -281,7 +281,15 @@
for child in self.children:
child.mark_deleted()
-
+ def mark_undeleted( self, include_children=True ):
+ self.deleted = False
+ if include_children:
+ for child in self.children:
+ child.mark_undeleted()
+ def undeletable( self ):
+ if self.purged:
+ return False
+ return True
class History( object ):
def __init__( self, id=None, name=None, user=None ):
diff -r 20591fa0d05d -r 45033114f82d lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Oct 02 15:51:17 2008 -0400
+++ b/lib/galaxy/model/mapping.py Thu Oct 02 16:37:32 2008 -0400
@@ -288,7 +288,9 @@
assign_mapper( context, History, History.table,
properties=dict( galaxy_sessions=relation( GalaxySessionToHistoryAssociation ),
datasets=relation( HistoryDatasetAssociation, backref="history", order_by=asc(HistoryDatasetAssociation.table.c.hid) ),
- active_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( HistoryDatasetAssociation.table.c.deleted ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), lazy=False, viewonly=True ) ) )
+ active_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( HistoryDatasetAssociation.table.c.deleted ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), lazy=False, viewonly=True ),
+ activatable_datasets=relation( HistoryDatasetAssociation, primaryjoin=( ( HistoryDatasetAssociation.table.c.history_id == History.table.c.id ) & ( not_( Dataset.table.c.purged ) ) ), order_by=asc( HistoryDatasetAssociation.table.c.hid ), lazy=False, viewonly=True )
+ ) )
assign_mapper( context, User, User.table,
properties=dict( histories=relation( History, backref="user",
diff -r 20591fa0d05d -r 45033114f82d lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Oct 02 15:51:17 2008 -0400
+++ b/lib/galaxy/util/__init__.py Thu Oct 02 16:37:32 2008 -0400
@@ -206,7 +206,7 @@
return ''
def string_as_bool( string ):
- if string.lower() in ( 'true', 'yes', 'on' ):
+ if str( string ).lower() in ( 'true', 'yes', 'on' ):
return True
else:
return False
diff -r 20591fa0d05d -r 45033114f82d lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Thu Oct 02 15:51:17 2008 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Thu Oct 02 16:37:32 2008 -0400
@@ -128,3 +128,19 @@
return open(file_path)
except:
raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % (filename) )
+
+ @web.expose
+ def undelete( self, trans, id ):
+ history = trans.get_history()
+ data = self.app.model.HistoryDatasetAssociation.get( id )
+ if data and data.undeletable:
+ # Walk up parent datasets to find the containing history
+ topmost_parent = data
+ while topmost_parent.parent:
+ topmost_parent = topmost_parent.parent
+ assert topmost_parent in history.datasets, "Data does not belong to current history"
+ # Mark undeleted
+ data.mark_undeleted()
+ self.app.model.flush()
+ trans.log_event( "Dataset id %s has been undeleted" % str(id) )
+ return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
diff -r 20591fa0d05d -r 45033114f82d lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Oct 02 15:51:17 2008 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Oct 02 16:37:32 2008 -0400
@@ -48,7 +48,7 @@
## ---- Root history display ---------------------------------------------
@web.expose
- def history( self, trans, as_xml=False ):
+ def history( self, trans, as_xml=False, show_deleted=False ):
"""
Display the current history, creating a new history if neccesary.
@@ -63,7 +63,7 @@
return trans.fill_template_mako( "root/history_as_xml.mako", history=history )
else:
template = "root/history.mako"
- return trans.fill_template( "root/history.mako", history=history )
+ return trans.fill_template( "root/history.mako", history = history, show_deleted = util.string_as_bool( show_deleted ) )
@web.expose
def dataset_state ( self, trans, id=None, stamp=None ):
@@ -271,7 +271,7 @@
datatypes=ldatatypes, err=None )
@web.expose
- def delete( self, trans, id = None, **kwd):
+ def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
if id:
if isinstance( id, list ):
dataset_ids = id
@@ -300,7 +300,7 @@
self.app.job_stop_queue.put( data.creating_job_associations[0].job )
except IndexError:
pass # upload tool will cause this since it doesn't have a job
- return self.history( trans )
+ return self.history( trans, show_deleted = show_deleted_on_refresh )
@web.expose
def delete_async( self, trans, id = None, **kwd):
diff -r 20591fa0d05d -r 45033114f82d templates/history/options.mako
--- a/templates/history/options.mako Thu Oct 02 15:51:17 2008 -0400
+++ b/templates/history/options.mako Thu Oct 02 16:37:32 2008 -0400
@@ -19,6 +19,7 @@
<li><a href="${h.url_for( controller='workflow', action='build_from_current_history' )}">Construct workflow</a> from the current history</li>
<li><a href="${h.url_for( action='history_share' )}" target="galaxy_main">Share</a> current history</div>
%endif
+ <li><a href="${h.url_for( action='history', show_deleted=True)}" target="galaxy_history">Show deleted</a> datasets in history</li>
<li><a href="${h.url_for( action='history_delete', id=history.id )}" confirm="Are you sure you want to delete the current history?">Delete</a> current history</div>
</ul>
diff -r 20591fa0d05d -r 45033114f82d templates/root/history.mako
--- a/templates/root/history.mako Thu Oct 02 15:51:17 2008 -0400
+++ b/templates/root/history.mako Thu Oct 02 16:37:32 2008 -0400
@@ -209,7 +209,10 @@
<body class="historyPage">
<div id="top-links" class="historyLinks">
- <a href="${h.url_for('history')}">refresh</a>
+ <a href="${h.url_for('history', show_deleted=show_deleted)}">refresh</a>
+ %if show_deleted:
+ | <a href="${h.url_for('history', show_deleted=False)}">hide deleted</a>
+ %endif
</div>
%if history.deleted:
@@ -221,11 +224,19 @@
<%namespace file="history_common.mako" import="render_dataset" />
-%if len(history.active_datasets) < 1:
+%if ( show_deleted and len( history.datasets ) < 1 ) or len( history.active_datasets ) < 1:
<div class="infomessagesmall" id="emptyHistoryMessage">
%else:
- ## Render all active (not deleted) datasets, ordered from newest to oldest
- %for data in reversed( history.active_datasets ):
+ <%
+ if show_deleted:
+ #all datasets
+ datasets_to_show = history.activatable_datasets
+ else:
+ #active (not deleted)
+ datasets_to_show = history.active_datasets
+ %>
+ ## Render requested datasets, ordered from newest to oldest
+ %for data in reversed( datasets_to_show ):
%if data.visible:
<div class="historyItemContainer" id="historyItemContainer-${data.id}">
${render_dataset( data, data.hid )}
diff -r 20591fa0d05d -r 45033114f82d templates/root/history_common.mako
--- a/templates/root/history_common.mako Thu Oct 02 15:51:17 2008 -0400
+++ b/templates/root/history_common.mako Thu Oct 02 16:37:32 2008 -0400
@@ -8,8 +8,12 @@
%>
<div class="historyItemWrapper historyItem historyItem-${data_state}" id="historyItem-${data.id}">
+ %if data.deleted:
+ <div class="warningmessagesmall">
+ <strong>This dataset has been deleted. Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" target="galaxy_history">here</a> to undelete.</strong>
+ </div>
+ %endif
## Header row for history items (name, state, action buttons)
-
<div style="overflow: hidden;" class="historyItemTitleBar">
<div style="float: left; padding-right: 3px;">
<div style='display: none;' id="progress-${data.id}">
@@ -24,7 +28,7 @@
<div style="float: right;">
<a href="${h.url_for( controller='dataset', dataset_id=data.id, action='display', filename='index')}" target="galaxy_main"><img src="${h.url_for('/static/images/eye_icon.png')}" rollover="${h.url_for('/static/images/eye_icon_dark.png')}" width='16' height='16' alt='display data' title='display data' class='displayButton' border='0'></a>
<a href="${h.url_for( action='edit', id=data.id )}" target="galaxy_main"><img src="${h.url_for('/static/images/pencil_icon.png')}" rollover="${h.url_for('/static/images/pencil_icon_dark.png')}" width='16' height='16' alt='edit attributes' title='edit attributes' class='editButton' border='0'></a>
- <a href="${h.url_for( action='delete', id=data.id )}" class="historyItemDelete" id="historyItemDelter-${data.id}"><img src="${h.url_for('/static/images/delete_icon.png')}" rollover="${h.url_for('/static/images/delete_icon_dark.png')}" width='16' height='16' alt='delete' class='deleteButton' border='0'></a>
+ <a href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted )}" class="historyItemDelete" id="historyItemDelter-${data.id}"><img src="${h.url_for('/static/images/delete_icon.png')}" rollover="${h.url_for('/static/images/delete_icon_dark.png')}" width='16' height='16' alt='delete' class='deleteButton' border='0'></a>
</div>
<span class="historyItemTitle"><b>${hid}: ${data.display_name()}</b></span>
</div>
@@ -100,4 +104,4 @@
</div>
</div>
-</%def>
\ No newline at end of file
+</%def>
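One small but consequential fix in the diff above is in util.string_as_bool: root.history now passes show_deleted with an actual boolean default, and bool has no .lower() method, so the value is coerced through str() first. A quick illustration of the repaired behavior (a sketch, not the Galaxy source):

    # Sketch of the string_as_bool fix: coercing through str() lets the
    # helper accept real booleans as well as strings.
    def string_as_bool( string ):
        if str( string ).lower() in ( 'true', 'yes', 'on' ):
            return True
        else:
            return False

    print( string_as_bool( 'True' ) )  # True
    print( string_as_bool( True ) )    # True -- .lower() on a bool would raise
    print( string_as_bool( False ) )   # False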
details: http://www.bx.psu.edu/hg/galaxy/rev/7e2ec07716b8
changeset: 1544:7e2ec07716b8
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Oct 03 16:06:01 2008 -0400
description:
Add ajax async undelete ability.
Fix redisplay of a dataset that is asynchronously deleted while the history view is showing deleted datasets.
5 file(s) affected in this change:
lib/galaxy/tools/parameters/basic.py
lib/galaxy/web/controllers/dataset.py
templates/root/history.mako
templates/root/history_common.mako
templates/root/history_item.mako
diffs (183 lines):
diff -r 45033114f82d -r 7e2ec07716b8 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Oct 02 16:37:32 2008 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Fri Oct 03 16:06:01 2008 -0400
@@ -1052,7 +1052,7 @@
hid = "%s.%d" % ( parent_hid, i + 1 )
else:
hid = str( data.hid )
- if not data.deleted and data.state not in [data.states.ERROR] and data.visible:
+ if not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED] and data.visible:
if self.options and data.get_dbkey() != filter_value:
continue
if isinstance( data.datatype, self.formats):
@@ -1112,7 +1112,7 @@
return True
return False
for i, data in enumerate( datasets ):
- if data.visible and not data.deleted and data.state not in [data.states.ERROR] and ( isinstance( data.datatype, self.formats) or is_convertable( data ) ):
+ if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED] and ( isinstance( data.datatype, self.formats) or is_convertable( data ) ):
if self.options and data.get_dbkey() != filter_value:
continue
most_recent_dataset[0] = data
diff -r 45033114f82d -r 7e2ec07716b8 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Thu Oct 02 16:37:32 2008 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Fri Oct 03 16:06:01 2008 -0400
@@ -128,9 +128,8 @@
return open(file_path)
except:
raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % (filename) )
-
- @web.expose
- def undelete( self, trans, id ):
+
+ def _undelete( self, trans, id ):
history = trans.get_history()
data = self.app.model.HistoryDatasetAssociation.get( id )
if data and data.undeletable:
@@ -143,4 +142,16 @@
data.mark_undeleted()
self.app.model.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
+ return True
+ return False
+
+ @web.expose
+ def undelete( self, trans, id ):
+ self._undelete( trans, id )
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted = True ) )
+
+ @web.expose
+ def undelete_async( self, trans, id ):
+ if self._undelete( trans, id ):
+ return "OK"
+ raise "Error undeleting"
diff -r 45033114f82d -r 7e2ec07716b8 templates/root/history.mako
--- a/templates/root/history.mako Thu Oct 02 16:37:32 2008 -0400
+++ b/templates/root/history.mako Fri Oct 03 16:06:01 2008 -0400
@@ -91,17 +91,41 @@
url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
error: function() { alert( "Delete failed" ) },
success: function() {
- q( "#historyItem-" + data_id ).fadeOut( "fast", function() {
- q( "div#historyItemContainer-" + data_id ).remove();
- if ( q( "div.historyItemContainer" ).length < 1 ) {
- q ( "div#emptyHistoryMessage" ).show();
+ if ( "${show_deleted}" == "True" ){
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
}
- });
+ else {
+ q( "#historyItem-" + data_id ).fadeOut( "fast", function() {
+ q( "div#historyItemContainer-" + data_id ).remove();
+ if ( q( "div.historyItemContainer" ).length < 1 ) {
+ q ( "div#emptyHistoryMessage" ).show();
+ }
+ });
+ }
}
});
return false;
});
});
+ // Undelete link
+ q(this).find( "a.historyItemUndelete" ).each( function() {
+ var data_id = this.id.split( "-" )[1];
+ q(this).click( function() {
+ q( '#progress-' + data_id ).show();
+ q.ajax({
+ url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { alert( "Undelete failed" ) },
+ success: function() {
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ }
+ });
+ return false;
+ });
+ });
});
};
// Looks for changes in dataset state using an async request. Keeps
@@ -143,7 +167,7 @@
setupHistoryItem( container.children( ".historyItemWrapper" ) );
initShowHide();
// If new state was terminal, stop tracking
- if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" )) {
+ if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
delete tracked_datasets[ parseInt(id) ];
} else {
tracked_datasets[ parseInt(id) ] = val.state;
@@ -239,7 +263,7 @@
%for data in reversed( datasets_to_show ):
%if data.visible:
<div class="historyItemContainer" id="historyItemContainer-${data.id}">
- ${render_dataset( data, data.hid )}
+ ${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted )}
</div>
%endif
%endfor
diff -r 45033114f82d -r 7e2ec07716b8 templates/root/history_common.mako
--- a/templates/root/history_common.mako Thu Oct 02 16:37:32 2008 -0400
+++ b/templates/root/history_common.mako Fri Oct 03 16:06:01 2008 -0400
@@ -1,5 +1,5 @@
## Render the dataset `data` as history item, using `hid` as the displayed id
-<%def name="render_dataset( data, hid )">
+<%def name="render_dataset( data, hid, show_deleted_on_refresh = False )">
<%
if data.state in ['no state','',None]:
data_state = "queued"
@@ -10,7 +10,7 @@
%if data.deleted:
<div class="warningmessagesmall">
- <strong>This dataset has been deleted. Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" target="galaxy_history">here</a> to undelete.</strong>
+ <strong>This dataset has been deleted. Click <a href="${h.url_for( controller='dataset', action='undelete', id=data.id )}" class="historyItemUndelete" id="historyItemUndeleter-${data.id}" target="galaxy_history">here</a> to undelete.</strong>
</div>
%endif
## Header row for history items (name, state, action buttons)
@@ -28,7 +28,7 @@
<div style="float: right;">
<a href="${h.url_for( controller='dataset', dataset_id=data.id, action='display', filename='index')}" target="galaxy_main"><img src="${h.url_for('/static/images/eye_icon.png')}" rollover="${h.url_for('/static/images/eye_icon_dark.png')}" width='16' height='16' alt='display data' title='display data' class='displayButton' border='0'></a>
<a href="${h.url_for( action='edit', id=data.id )}" target="galaxy_main"><img src="${h.url_for('/static/images/pencil_icon.png')}" rollover="${h.url_for('/static/images/pencil_icon_dark.png')}" width='16' height='16' alt='edit attributes' title='edit attributes' class='editButton' border='0'></a>
- <a href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted )}" class="historyItemDelete" id="historyItemDelter-${data.id}"><img src="${h.url_for('/static/images/delete_icon.png')}" rollover="${h.url_for('/static/images/delete_icon_dark.png')}" width='16' height='16' alt='delete' class='deleteButton' border='0'></a>
+ <a href="${h.url_for( action='delete', id=data.id, show_deleted_on_refresh=show_deleted_on_refresh )}" class="historyItemDelete" id="historyItemDeleter-${data.id}"><img src="${h.url_for('/static/images/delete_icon.png')}" rollover="${h.url_for('/static/images/delete_icon_dark.png')}" width='16' height='16' alt='delete' class='deleteButton' border='0'></a>
</div>
<span class="historyItemTitle"><b>${hid}: ${data.display_name()}</b></span>
</div>
@@ -44,6 +44,10 @@
<div>
An error occurred running this job: <i>${data.display_info().strip()}</i>,
<a href="${h.url_for( controller='dataset', action='errors', id=data.id )}" target="galaxy_main">report this error</a>
+ </div>
+ %elif data_state == "discarded":
+ <div>
+ The job creating this dataset was cancelled before completion.
</div>
%elif data_state == "empty":
<div>No data: <i>${data.display_info()}</i></div>
@@ -95,7 +99,7 @@
<div>
There are ${len( children )} secondary datasets.
%for idx, child in enumerate(children):
- ${render_dataset( child, idx + 1 )}
+ ${render_dataset( child, idx + 1, show_deleted_on_refresh = show_deleted_on_refresh )}
%endfor
</div>
%endif
diff -r 45033114f82d -r 7e2ec07716b8 templates/root/history_item.mako
--- a/templates/root/history_item.mako Thu Oct 02 16:37:32 2008 -0400
+++ b/templates/root/history_item.mako Fri Oct 03 16:06:01 2008 -0400
@@ -1,8 +1,3 @@
<%namespace file="history_common.mako" import="render_dataset" />
-## this is necessary because this dataset remains in history.active_datasets
-## after deletion, until the history is reloaded
-## FIXME: still necessary now that we don't re-pull finished datasets? test.
-%if data.deleted is not True:
- ${render_dataset( data, hid )}
-%endif
\ No newline at end of file
+${render_dataset( data, hid )}
06 Oct '08
details: http://www.bx.psu.edu/hg/galaxy/rev/cec03ea2d5aa
changeset: 1545:cec03ea2d5aa
user: guru
date: Mon Oct 06 09:33:52 2008 -0400
description:
Modified help message for indel rate tool.
1 file(s) affected in this change:
tools/regVariation/getIndelRates_3way.xml
diffs (25 lines):
diff -r 7e2ec07716b8 -r cec03ea2d5aa tools/regVariation/getIndelRates_3way.xml
--- a/tools/regVariation/getIndelRates_3way.xml Fri Oct 03 16:06:01 2008 -0400
+++ b/tools/regVariation/getIndelRates_3way.xml Mon Oct 06 09:33:52 2008 -0400
@@ -14,8 +14,8 @@
<conditional name="region">
<param name="type" type="select" label="Estimate rates corresponding to" multiple="false">
- <option value="align" selected="True">Alignment block</option>
- <option value="win">Intervals in your history</option>
+ <option value="win" selected="True">Intervals in your history</option>
+ <option value="align">Alignment block</option>
</param>
<when value="win">
<param format="interval" name="input2" type="data" label="Choose intervals">
@@ -45,8 +45,8 @@
**What it does**
-This tool estimates the insertion and deletion rates for alignments in a window of specified size.
-
+This tool estimates the insertion and deletion rates for alignments in a window of specified size. Rates are computed over the total adjusted lengths (adjusted by disregarding masked bases) of all the alignments blocks from the indel file that fall within that window.
+
-----
.. class:: warningmark
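As a worked example of the computation the new help text describes: if the alignment blocks falling within a window have adjusted (unmasked) lengths summing to 10,000 bases and 25 indel events occur in them, the window's rate is 25 / 10000 = 0.0025 events per base. In Python (illustrative arithmetic only, not the tool's code):

    # Illustrative indel-rate arithmetic for one window.
    def indel_rate( num_events, adjusted_block_lengths ):
        # Events per base over the total adjusted (unmasked) block length.
        total = sum( adjusted_block_lengths )
        return float( num_events ) / total if total else 0.0

    print( indel_rate( 25, [4000, 3500, 2500] ) )  # 25 / 10000 = 0.0025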
06 Oct '08
details: http://www.bx.psu.edu/hg/galaxy/rev/d93cf9d961bb
changeset: 1546:d93cf9d961bb
user: guru
date: Mon Oct 06 09:40:47 2008 -0400
description:
Updating "regional variation' section in tool_conf.xml.main.
1 file(s) affected in this change:
tool_conf.xml.main
diffs (33 lines):
diff -r cec03ea2d5aa -r d93cf9d961bb tool_conf.xml.main
--- a/tool_conf.xml.main Mon Oct 06 09:33:52 2008 -0400
+++ b/tool_conf.xml.main Mon Oct 06 09:40:47 2008 -0400
@@ -122,14 +122,22 @@
<tool file="visualization/build_ucsc_custom_track.xml" />
</section>
<section name="Regional Variation" id="regVar">
- <tool file="regVariation/windowSplitter.xml" />
- <tool file="regVariation/featureCounter.xml" />
- <tool file="regVariation/quality_filter.xml" />
+ <tool file="regVariation/windowSplitter.xml" />
+ <tool file="regVariation/featureCounter.xml" />
+ <tool file="regVariation/quality_filter.xml" />
<tool file="regVariation/maf_cpg_filter.xml" />
- <!--
- <tool file="regVariation/getIndels_2way.xml" />
- <tool file="regVariation/getIndels_3way.xml" />
- -->
+ <tool file="regVariation/getIndels_2way.xml" />
+ <tool file="regVariation/getIndels_3way.xml" />
+ <tool file="regVariation/getIndelRates_3way.xml" />
+ <tool file="regVariation/substitutions.xml" />
+ <tool file="regVariation/substitution_rates.xml" />
+ <tool file="regVariation/microsats_alignment_level.xml" />
+ <tool file="regVariation/microsats_mutability.xml" />
+ </section>
+ <section name="Multiple regression" id="multReg">
+ <tool file="regVariation/linear_regression.xml" />
+ <tool file="regVariation/best_regression_subsets.xml" />
+ <tool file="regVariation/rcve.xml" />
</section>
<section name="Evolution: HyPhy" id="hyphy">
<tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
fresh (this morning) security hg pull -u: OperationalError: (OperationalError) no such column: dataset.purged - same error with a new, empty db
by Ross 05 Oct '08
URL: http://localhost:8080/user/create
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/WebError-0.8a-py2.5.egg/weberror/evalexception/middleware.py', line 364 in respond
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/Paste-1.5.1-py2.5.egg/paste/debug/prints.py', line 98 in __call__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/Paste-1.5.1-py2.5.egg/paste/wsgilib.py', line 539 in intercept_output
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/Beaker-0.5-py2.5.egg/beaker/session.py', line 103 in __call__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/Paste-1.5.1-py2.5.egg/paste/recursive.py', line 80 in __call__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/Paste-1.5.1-py2.5.egg/paste/httpexceptions.py', line 632 in __call__
File '/home/rossl/py/galaxy_security/lib/galaxy/web/framework/base.py', line 125 in __call__
  body = method( trans, **kwargs )
File '/home/rossl/py/galaxy_security/lib/galaxy/web/controllers/user.py', line 120 in create
  trans.app.security_agent.setup_new_user( user )
File '/home/rossl/py/galaxy_security/lib/galaxy/security/__init__.py', line 42 in setup_new_user
  self.user_set_default_access( user, history = True, dataset = True )
File '/home/rossl/py/galaxy_security/lib/galaxy/security/__init__.py', line 196 in user_set_default_access
  for history in user.active_histories:
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/attributes.py', line 44 in __get__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/attributes.py', line 279 in get
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/strategies.py', line 466 in __call__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/query.py', line 878 in all
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/query.py', line 938 in __iter__
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/query.py', line 941 in _execute_and_instances
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/orm/session.py', line 628 in execute
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 844 in execute
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 895 in execute_clauseelement
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 907 in _execute_compiled
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 916 in __execute_raw
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 960 in _cursor_execute
File '/home/rossl/py/galaxy_security/eggs/py2.5-noplatform/SQLAlchemy-0.4.7p1-py2.5.egg/sqlalchemy/engine/base.py', line 942 in _handle_dbapi_exception
OperationalError: (OperationalError) no such column: dataset.purged
u'SELECT history.id AS history_id, history.create_time AS
history_create_time, history.update_time AS history_update_time,
history.user_id AS history_user_id, history.name AS history_name,
history.hid_counter AS history_hid_counter, history.deleted AS
history_deleted, history.purged AS history_purged,
history.genome_build AS history_genome_build, dataset_1.id AS
dataset_1_id, dataset_1.create_time AS dataset_1_create_time,
dataset_1.update_time AS dataset_1_update_time, dataset_1.state AS
dataset_1_state, dataset_1.deleted AS dataset_1_deleted,
dataset_1.purged AS dataset_1_purged, dataset_1.purgable AS
dataset_1_purgable, dataset_1.external_filename AS
dataset_1_external_filename, dataset_1._extra_files_path AS
dataset_1__extra_files_path, dataset_1.file_size AS
dataset_1_file_size, history_dataset_association_1.id AS
history_dataset_association_1_id,
history_dataset_association_1.history_id AS
history_dataset_association_1_history_id,
history_dataset_association_1.dataset_id AS
history_dataset_association_1_dataset_id,
history_dataset_association_1.create_time AS
history_dataset_association_1_create_time,
history_dataset_association_1.update_time AS
history_dataset_association_1_update_time,
history_dataset_association_1.copied_from_history_dataset_association_id
AS history_dataset_association_1_copied_from_history_dataset_association_id,
history_dataset_association_1.copied_from_library_folder_dataset_association_id
AS history_dataset_association_1_copied_from_library_folder_dataset_association_id,
history_dataset_association_1.hid AS
history_dataset_association_1_hid, history_dataset_association_1.name
AS history_dataset_association_1_name,
history_dataset_association_1.info AS
history_dataset_association_1_info,
history_dataset_association_1.blurb AS
history_dataset_association_1_blurb,
history_dataset_association_1.peek AS
history_dataset_association_1_peek,
history_dataset_association_1.extension AS
history_dataset_association_1_extension,
history_dataset_association_1.metadata AS
history_dataset_association_1_metadata,
history_dataset_association_1.parent_id AS
history_dataset_association_1_parent_id,
history_dataset_association_1.designation AS
history_dataset_association_1_designation,
history_dataset_association_1.deleted AS
history_dataset_association_1_deleted,
history_dataset_association_1.visible AS
history_dataset_association_1_visible, dataset_2.id AS dataset_2_id,
dataset_2.create_time AS dataset_2_create_time, dataset_2.update_time
AS dataset_2_update_time, dataset_2.state AS dataset_2_state,
dataset_2.deleted AS dataset_2_deleted, dataset_2.purged AS
dataset_2_purged, dataset_2.purgable AS dataset_2_purgable,
dataset_2.external_filename AS dataset_2_external_filename,
dataset_2._extra_files_path AS dataset_2__extra_files_path,
dataset_2.file_size AS dataset_2_file_size,
history_dataset_association_2.id AS history_dataset_association_2_id,
history_dataset_association_2.history_id AS
history_dataset_association_2_history_id,
history_dataset_association_2.dataset_id AS
history_dataset_association_2_dataset_id,
history_dataset_association_2.create_time AS
history_dataset_association_2_create_time,
history_dataset_association_2.update_time AS
history_dataset_association_2_update_time,
history_dataset_association_2.copied_from_history_dataset_association_id
AS history_dataset_association_2_copied_from_history_dataset_association_id,
history_dataset_association_2.copied_from_library_folder_dataset_association_id
AS history_dataset_association_2_copied_from_library_folder_dataset_association_id,
history_dataset_association_2.hid AS
history_dataset_association_2_hid, history_dataset_association_2.name
AS history_dataset_association_2_name,
history_dataset_association_2.info AS
history_dataset_association_2_info,
history_dataset_association_2.blurb AS
history_dataset_association_2_blurb,
history_dataset_association_2.peek AS
history_dataset_association_2_peek,
history_dataset_association_2.extension AS
history_dataset_association_2_extension,
history_dataset_association_2.metadata AS
history_dataset_association_2_metadata,
history_dataset_association_2.parent_id AS
history_dataset_association_2_parent_id,
history_dataset_association_2.designation AS
history_dataset_association_2_designation,
history_dataset_association_2.deleted AS
history_dataset_association_2_deleted,
history_dataset_association_2.visible AS
history_dataset_association_2_visible \nFROM history LEFT OUTER JOIN
history_dataset_association AS history_dataset_association_1 ON
history_dataset_association_1.history_id = history.id AND NOT
history_dataset_association_1.deleted LEFT OUTER JOIN dataset AS
dataset_1 ON dataset_1.id = history_dataset_association_1.dataset_id
LEFT OUTER JOIN history_dataset_association AS
history_dataset_association_2 ON
history_dataset_association_2.history_id = history.id AND NOT
dataset.purged LEFT OUTER JOIN dataset AS dataset_2 ON dataset_2.id =
history_dataset_association_2.dataset_id \nWHERE history.user_id = ?
AND NOT history.deleted ORDER BY history.update_time DESC,
history_dataset_association_1.hid ASC, dataset_1.oid,
history_dataset_association_2.hid ASC, dataset_2.oid' [1]
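Reading the generated SQL suggests the failure is in query construction rather than the schema, which would explain why a new, empty database shows the same error: the ON clause for the second outer join reads "history_dataset_association_2.history_id = history.id AND NOT dataset.purged", referencing the unaliased dataset table, but only the aliases dataset_1 and dataset_2 are joined, so SQLite cannot resolve dataset.purged. (The activatable_datasets primaryjoin in the changeset above uses Dataset.table.c.purged in a History-to-HistoryDatasetAssociation relation, which is where the unaliased name comes from.) A small sqlite3 reproduction of the same class of error, with an illustrative schema:

    # Reproduce "no such column" from an ON clause that names a table
    # not present in the join (illustrative schema, not Galaxy's).
    import sqlite3

    conn = sqlite3.connect( ":memory:" )
    conn.executescript( """
        CREATE TABLE history ( id INTEGER PRIMARY KEY );
        CREATE TABLE history_dataset_association (
            id INTEGER PRIMARY KEY, history_id INTEGER, dataset_id INTEGER );
        CREATE TABLE dataset ( id INTEGER PRIMARY KEY, purged BOOLEAN );
    """ )
    try:
        conn.execute( """
            SELECT history.id FROM history
            LEFT OUTER JOIN history_dataset_association
                ON history_dataset_association.history_id = history.id
                AND NOT dataset.purged
        """ )
    except sqlite3.OperationalError as e:
        print( e )  # no such column: dataset.purged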