galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
June 2010
- 1 participant
- 75 discussions
galaxy-dist commit beb30aadccd7: Fix IE crashing on pages with grids
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Kanwei Li <kanwei(a)gmail.com>
# Date 1277239345 14400
# Node ID beb30aadccd75bfdec6b06d4afcd704c2c16dbc3
# Parent 5e75bad5a780acb7c2975bcc375216c1abb5d148
Fix IE crashing on pages with grids
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -245,14 +245,13 @@
%endfor
// Initialize URL args with filter arguments.
- var url_args = ${h.to_json_string( cur_filter_dict )};
+ var url_args_init = ${h.to_json_string( cur_filter_dict )},
+ url_args = {};
// Place "f-" in front of all filter arguments.
- for (arg in url_args)
- {
- value = url_args[arg];
- delete url_args[arg];
- url_args["f-" + arg] = value;
+
+ for (arg in url_args_init) {
+ url_args["f-" + arg] = url_args_init[arg];
}
// Add sort argument to URL args.
1
0
galaxy-dist commit f2f31fe4cf08: New: Send Email Action, Delete Action.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Dannon Baker <dannon.baker(a)emory.edu>
# Date 1277238844 14400
# Node ID f2f31fe4cf08f9500a886184b5c5f15fa4184e4d
# Parent 1f8035729ccd73f31522e816cf5467409e841b91
New: Send Email Action, Delete Action.
Note that delete action is dangerous and will break a workflow if you delete datasets that are required in later steps.
Partial refactoring to make addition of actions simpler.
--- a/lib/galaxy/jobs/actions/post.py
+++ b/lib/galaxy/jobs/actions/post.py
@@ -5,18 +5,50 @@ from galaxy.util.json import from_json_s
from galaxy.web.form_builder import *
+
+# For email notification PJA
+from email.MIMEText import MIMEText
+import smtplib
+
log = logging.getLogger( __name__ )
# DBTODO This still needs refactoring and general cleanup.
+def get_form_template(action_type, title, content, help, on_output = True ):
+ if on_output:
+ form = """
+ if (pja.action_type == "%s"){
+ p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'> %s <br/> on " + pja.output_name + "\
+ <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div><div class='toolFormBody'>";
+ %s
+ p_str += "</div><div class='toolParamHelp'>%s</div></div>";
+ }""" % (action_type, title, content, help)
+ else:
+ form = """
+ if (pja.action_type == "%s"){
+ p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + "</span><div class='toolFormTitle'> %s \
+ <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div><div class='toolFormBody'>";
+ %s
+ p_str += "</div><div class='toolParamHelp'>%s</div></div>";
+ }""" % (action_type, title, content, help)
+ return form
+
+# def get_field(action, argument, i_type, label = None):
+# fstr = ''
+# fname = """pja__"+pja.output_name+"__%s__%s""" % (action, argument)
+# if label:
+# fstr += """<label for='pja__"+pja.output_name+"__ColumnSetAction__chromCol'>Chrom Column</label>"""
+# fstr += """<input type='text' value=" + chromCol + " name='pja__"+pja.output_name+"__ColumnSetAction__chromCol'/>"""
+
class DefaultJobAction(object):
name = "DefaultJobAction"
+ verbose_name = "Default Job"
@classmethod
def execute(cls, job):
pass
- @classmethod
+ @classmethod
def get_config_form(cls, trans):
return "<p>Default Job Action Config Form</p>"
@@ -26,11 +58,42 @@ class DefaultJobAction(object):
return "%s -> %s" % (pja.action_type, pja.action_arguments)
else:
return "%s" % pja.action_type
-
+
+
+class EmailAction(DefaultJobAction):
+ name = "EmailAction"
+ verbose_name = "Email Notification"
+ @classmethod
+ def execute(cls, trans, action, job):
+ smtp_server = trans.app.config.smtp_server
+ if smtp_server is None:
+ return trans.show_error_message( "Mail is not configured for this galaxy instance, workflow action aborted." )
+ # Build the email message
+ msg = MIMEText( "Your job '%s' at Galaxy instance %s is complete as of %s." % (job.history.name, trans.request.host, job.update_time))
+ msg[ 'To' ] = job.user.email
+ msg[ 'From' ] = job.user.email
+ msg[ 'Subject' ] = "Galaxy workflow step notification '%s'"
+ try:
+ s = smtplib.SMTP()
+ s.connect( smtp_server )
+ s.sendmail( frm, [ to ], msg.as_string() )
+ s.close()
+ return trans.show_ok_message( "Your error report has been sent" )
+ except Exception, e:
+ return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )
+
+ @classmethod
+ def get_config_form(cls, trans):
+ form = """
+ p_str += "<label for='pja__"+pja.output_name+"__EmailAction'>There are no additional options for this action. You will be emailed upon job completion.</label>\
+ <input type='hidden' name='pja__"+pja.output_name+"__EmailAction'/>";
+ """
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will send an email notifying you when the job is done.", on_output = False)
+
class ChangeDatatypeAction(DefaultJobAction):
name = "ChangeDatatypeAction"
-
+ verbose_name = "Change Datatype"
@classmethod
def execute(cls, trans, action, job):
for dataset_assoc in job.output_datasets:
@@ -45,24 +108,21 @@ class ChangeDatatypeAction(DefaultJobAct
for dt_name in dtnames:
dt_list += """<option id='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype__%s' value='%s'>%s</option>""" % (dt_name, dt_name, dt_name)
ps = """
- if (pja.action_type == "ChangeDatatypeAction"){
- p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'>" + pja.action_type + " <br/> on " + pja.output_name + "\
- <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div>\
- <div class='toolFormBody'><label for='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype'>New Datatype:</label><select id='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype' name='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype'>\
+ p_str += "<label for='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype'>New Datatype:</label>\
+ <select id='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype' name='pja__"+pja.output_name+"__ChangeDatatypeAction__newtype'>\
%s\
</select>";
- if (pja.action_arguments != undefined && pja.action_arguments.newtype != undefined){
- p_str += "<scrip" + "t type='text/javascript'>$('#pja__" + pja.output_name + "__ChangeDatatypeAction__newtype').val('" + pja.action_arguments.newtype + "');</scrip" + "t>"
- }
- p_str += "</div><div class='toolParamHelp'>This action will change the datatype of the output to the indicated value.</div></div>";
- }
+ if (pja.action_arguments != undefined && pja.action_arguments.newtype != undefined){
+ p_str += "<scrip" + "t type='text/javascript'>$('#pja__" + pja.output_name + "__ChangeDatatypeAction__newtype').val('" + pja.action_arguments.newtype + "');</scrip" + "t>";
+ }
""" % dt_list
# Note the scrip + t hack above. Is there a better way?
- return ps
+ return get_form_template(cls.name, cls.verbose_name, ps, 'This action will change the datatype of the output to the indicated value.')
class RenameDatasetAction(DefaultJobAction):
name = "RenameDatasetAction"
-
+ verbose_name = "Rename Dataset"
+
@classmethod
def execute(cls, trans, action, job):
for dataset_assoc in job.output_datasets:
@@ -71,23 +131,22 @@ class RenameDatasetAction(DefaultJobActi
@classmethod
def get_config_form(cls, trans):
- return """
- if (pja.action_type == "RenameDatasetAction"){
- p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'>"+ pja.action_type + " <br/> on " + pja.output_name + "\
- <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div><div class='toolFormBody'>";
- if ((pja.action_arguments != undefined) && (pja.action_arguments.newname != undefined)){
- p_str += "<label for='pja__"+pja.output_name+"__RenameDatasetAction__newname'>New output name:</label><input type='text' name='pja__"+pja.output_name+"__RenameDatasetAction__newname' value='"+pja.action_arguments.newname + "'/>";
- }
- else{
- p_str += "<label for='pja__"+pja.output_name+"__RenameDatasetAction__newname'>New output name:</label><input type='text' name='pja__"+pja.output_name+"__RenameDatasetAction__newname' value='New Name'/>";
- }
- p_str += "</div><div class='toolParamHelp'>This action will rename the result dataset.</div></div>";
- }
+ form = """
+ if ((pja.action_arguments != undefined) && (pja.action_arguments.newname != undefined)){
+ p_str += "<label for='pja__"+pja.output_name+"__RenameDatasetAction__newname'>New output name:</label>\
+ <input type='text' name='pja__"+pja.output_name+"__RenameDatasetAction__newname' value='"+pja.action_arguments.newname + "'/>";
+ }
+ else{
+ p_str += "<label for='pja__"+pja.output_name+"__RenameDatasetAction__newname'>New output name:</label>\
+ <input type='text' name='pja__"+pja.output_name+"__RenameDatasetAction__newname' value=''/>";
+ }
"""
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will rename the result dataset.")
class HideDatasetAction(DefaultJobAction):
name = "HideDatasetAction"
-
+ verbose_name = "Hide Dataset"
+
@classmethod
def execute(cls, trans, action, job):
for dataset_assoc in job.output_datasets:
@@ -96,14 +155,79 @@ class HideDatasetAction(DefaultJobAction
@classmethod
def get_config_form(cls, trans):
- return """
- if (pja.action_type == "HideDatasetAction"){
- p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'>"+ pja.action_type + " <br/> on " + pja.output_name + "\
- <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div><div class='toolFormBody'>";
- p_str += "<label for='pja__"+pja.output_name+"__HideDatasetAction'>There are no additional options for this action.</label><input type='hidden' name='pja__"+pja.output_name+"__HideDatasetAction'/>";
- p_str += "</div><div class='toolParamHelp'>This action will *hide* the result dataset from your history.</div></div>";
+ form = """
+ p_str += "<label for='pja__"+pja.output_name+"__HideDatasetAction'>There are no additional options for this action.</label>\
+ <input type='hidden' name='pja__"+pja.output_name+"__HideDatasetAction'/>";
+ """
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will rename the result dataset.")
+
+class DeleteDatasetAction(DefaultJobAction):
+ # This is disabled for right now. Deleting a dataset in the middle of a workflow causes errors (obviously) for the subsequent steps using the data.
+ name = "DeleteDatasetAction"
+ verbose_name = "Delete Dataset"
+
+ @classmethod
+ def execute(cls, trans, action, job):
+ for dataset_assoc in job.output_datasets:
+ if action.output_name == '' or dataset_assoc.name == action.output_name:
+ dataset_assoc.dataset.deleted=True
+
+ @classmethod
+ def get_config_form(cls, trans):
+ form = """
+ p_str += "<label for='pja__"+pja.output_name+"__DeleteDatasetAction'>There are no additional options for this action. This dataset will be marked deleted.</label>\
+ <input type='hidden' name='pja__"+pja.output_name+"__DeleteDatasetAction'/>";
+ """
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will rename the result dataset.")
+
+
+class ColumnSetAction(DefaultJobAction):
+ name = "ColumnSetAction"
+ verbose_name = "Assign Columns"
+ @classmethod
+ def execute(cls, trans, action, job):
+ for dataset_assoc in job.output_datasets:
+ if action.output_name == '' or dataset_assoc.name == action.output_name:
+ for k, v in action.action_arguments.items():
+ if v != '':
+ # Try to use both pure integer and 'cX' format.
+ if v[0] == 'c':
+ v = v[1:]
+ v = int(v)
+ if v != 0:
+ setattr(dataset_assoc.dataset.metadata, k, v)
+
+ @classmethod
+ def get_config_form(cls, trans):
+ form = """
+ var chrom_col = ''
+ if (pja.action_arguments != undefined){
+ (pja.action_arguments.chromCol == undefined) ? chromCol = "" : chromCol=pja.action_arguments.chromCol;
+ (pja.action_arguments.startCol == undefined) ? startCol = "" : startCol=pja.action_arguments.startCol;
+ (pja.action_arguments.endCol == undefined) ? endCol = "" : endCol=pja.action_arguments.endCol;
+ (pja.action_arguments.strandCol == undefined) ? strandCol = "" : strandCol=pja.action_arguments.strandCol;
+ (pja.action_arguments.nameCol == undefined) ? nameCol = "" : nameCol=pja.action_arguments.nameCol;
+ }else{
+ chromCol = '';
+ startCol = '';
+ endCol = '';
+ strandCol = '';
+ nameCol = '';
}
+ p_str += "<p>Leave any of these fields blank if they do not need to be set.</p>\
+ <label for='pja__"+pja.output_name+"__ColumnSetAction__chromCol'>Chrom Column</label>\
+ <input type='text' value='" + chromCol + "' name='pja__"+pja.output_name+"__ColumnSetAction__chromCol'/>\
+ <label for='pja__"+pja.output_name+"__ColumnSetAction__startCol'>Start Column</label>\
+ <input type='text' value='" + startCol + "' name='pja__"+pja.output_name+"__ColumnSetAction__startCol'/>\
+ <label for='pja__"+pja.output_name+"__ColumnSetAction__endCol'>End Column</label>\
+ <input type='text' value='" + endCol + "' name='pja__"+pja.output_name+"__ColumnSetAction__endCol'/>\
+ <label for='pja__"+pja.output_name+"__ColumnSetAction__strandCol'>Strand Column</label>\
+ <input type='text' value='" + strandCol + "' name='pja__"+pja.output_name+"__ColumnSetAction__strandCol'/>\
+ <label for='pja__"+pja.output_name+"__ColumnSetAction__nameCol'>Name Column</label>\
+ <input type='text' value='" + nameCol + "' name='pja__"+pja.output_name+"__ColumnSetAction__nameCol'/>\";
"""
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will set column assignments in the output dataset. Blank fields are ignored.")
+
class SetMetadataAction(DefaultJobAction):
name = "SetMetadataAction"
@@ -116,28 +240,38 @@ class SetMetadataAction(DefaultJobAction
@classmethod
def get_config_form(cls, trans):
- dt_list = ""
- mdict = {}
- for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems():
- for mn, mt in dtype_value.metadata_spec.items():
- if mt.visible:
- mdict[mt.desc] = mt.param.get_html(value= mn).replace('"', "'").strip().replace('\n','')
- for k, v in mdict.items():
- dt_list += "<p><strong>" + k + ":</strong><br/>" + v + "</p>"
- return """
- if (pja.action_type == "SetMetadataAction"){
- p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'>"+ pja.action_type + " <br/> on " + pja.output_name + "\
- <div style='float: right;' class='buttons'><img src='../images/delete_icon.png'></div></div>\
- <div class='toolFormBody'>\
- %s\
- </div><div class='toolParamHelp'>This tool sets metadata in output.</div></div>";
- }
- """ % dt_list
+ # dt_list = ""
+ # mdict = {}
+ # for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems():
+ # for mn, mt in dtype_value.metadata_spec.items():
+ # if mt.visible:
+ # mdict[mt.desc] = mt.param.get_html(value= mn).replace('"', "'").strip().replace('\n','')
+ # for k, v in mdict.items():
+ # dt_list += "<p><strong>" + k + ":</strong><br/>" + v + "</p>"
+ # form = """
+ # p_str += "%s";
+ # """ % dt_list
+ # return get_form_template('SetMetadataAction', 'Set Metadata', form, "This action will change metadata for the dataset.")
+ form = """
+ p_str += "<p>Leave any of these fields blank if they do not need to be set.</p><label for='pja__"+pja.output_name+"__SetMetadataAction__chromCol'>Chrom Column</label>\
+ <input type='text' name='pja__"+pja.output_name+"__SetMetadataAction__chromCol'/>\
+ <label for='pja__"+pja.output_name+"__SetMetadataAction__startCol'>Start Column</label>\
+ <input type='text' name='pja__"+pja.output_name+"__SetMetadataAction__startCol'/>\
+ <label for='pja__"+pja.output_name+"__SetMetadataAction__endCol'>End Column</label>\
+ <input type='text' name='pja__"+pja.output_name+"__SetMetadataAction__endCol'/>\
+ <label for='pja__"+pja.output_name+"__SetMetadataAction__comment_lines'>Comment Lines</label>\
+ <input type='text' name='pja__"+pja.output_name+"__SetMetadataAction__comment_lines'/>\
+ ";
+ """
+ return get_form_template(cls.name, cls.verbose_name, form, "This action will set metadata in the output dataset.")
ACTIONS = { "RenameDatasetAction" : RenameDatasetAction,
"HideDatasetAction" : HideDatasetAction,
"ChangeDatatypeAction": ChangeDatatypeAction,
- # "SetMetadataAction" : SetMetadataAction,
+ "ColumnSetAction" : ColumnSetAction,
+ "EmailAction" : EmailAction,
+ # "SetMetadataAction" : SetMetadataAction,
+ # "DeleteDatasetAction" : DeleteDatasetAction,
}
class ActionBox(object):
@@ -147,13 +281,13 @@ class ActionBox(object):
ACTIONS[action.action_type].execute(action, job, trans)
else:
return False
-
+
@classmethod
def get_short_str(cls, action):
if action.action_type in ACTIONS:
return ACTIONS[action.action_type].get_short_str(action)
else:
- return "Unknown PostJobAction"
+ return "Unknown Action"
@classmethod
def handle_incoming(cls, incoming):
@@ -180,7 +314,7 @@ class ActionBox(object):
def get_add_list(cls):
addlist = "<select id='new_pja_list' name='new_pja_list'>"
for action in ACTIONS:
- addlist += "<option value='%s'>%s</option>" % (ACTIONS[action].name, ACTIONS[action].name)
+ addlist += "<option value='%s'>%s</option>" % (ACTIONS[action].name, ACTIONS[action].verbose_name)
addlist += "</select>"
return addlist
@@ -193,4 +327,5 @@ class ActionBox(object):
@classmethod
def execute(cls, trans, pja, job):
- ACTIONS[pja.action_type].execute(trans, pja, job)
+ if ACTIONS.has_key(pja.action_type):
+ ACTIONS[pja.action_type].execute(trans, pja, job)
--- a/templates/workflow/editor.mako
+++ b/templates/workflow/editor.mako
@@ -313,9 +313,9 @@
// DBTODO Refactor to the post module.
// This function preloads how to display known pja's.
function display_pja(pja, node){
- // DBTODO SANITIZE INPUTS. Way too easy to break the page right now with a change dataset name action.
+ // DBTODO SANITIZE INPUTS.
p_str = '';
- ${ActionBox.get_forms(trans)};
+ ${ActionBox.get_forms(trans)}
$("#pja_container").append(p_str);
$("#pja_container>.toolForm:last>.toolFormTitle>.buttons").click(function (){
action_to_rem = $(this).closest(".toolForm", ".action_tag").children(".action_tag:first").text();
@@ -376,7 +376,7 @@
// Add step actions.
if (node && node.type=='tool'){
pjastr = "<p><div class='metadataForm'><div class='metadataFormTitle'>Edit Step Actions</div><div class='form-row'> \
- <label>New Actions:</label><br/>" + display_pja_list() + display_file_list(node) + " <div class='action-button' style='border:1px solid black;display:inline;' id='add_pja'>Create</div>\
+ " + display_pja_list() + " <br/> "+ display_file_list(node) + " <div class='action-button' style='border:1px solid black;display:inline;' id='add_pja'>Create</div>\
</div><div class='form-row'>\
<div style='margin-right: 10px;'><span id='pja_container'></span>";
pjastr += "<div class='toolParamHelp'>Add actions to this step; actions are applied when this workflow step completes.</div></div></div></div>";
1
0
galaxy-dist commit 464734ed09ad: Initial implementation of the Galaxy Web API. Disabled unless
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1277226628 14400
# Node ID 464734ed09ad4b36afc3409b5d975732b1c6eb21
# Parent 4b4385fbf52982ee3bf6394c5cb269e7bf3ab912
Initial implementation of the Galaxy Web API. Disabled unless
'enable_api = True' in config file. You should not enable the API on
production sites as this code is brand new and may contain serious bugs and
security flaws! Implemented:
* Display libraries
* Display library info
* Display library contents
* Display library content info
* Create library folders
* Upload datasets to a library from a server directory or with a path paste.
* Basic example scripts in scripts/api/
Framework changes that were made to support this:
* API Key interface in User Preferences.
* New api_keys database table for storing users' API Keys.
* New API-specific route mapper in webapp.
* API controllers in galaxy.web.api
* Return handling in reused library_common methods.
* expose_api decorator for API controller methods validates key and ensures
valid JSON format.
* UniverseWebTransaction renamed to GalaxyWebTransaction and subclassed for
GalaxyWebUITransaction and GalaxyWebAPITransaction.
Things that need to be done next:
* Documentation!
* Refactor reused code from library_common and other controllers into an
even-more-generic location and format. The main changes are that the Web UI
returns redirects and rendered templates, whereas the API returns various
HTTP status codes and JSON.
* Implement more functionality.
* The request and response format should be considered alpha and are subject to
change. They will be standardized as the API matures.
Hints to get started can be found in scripts/api/README
--- /dev/null
+++ b/lib/galaxy/web/api/libraries.py
@@ -0,0 +1,62 @@
+"""
+API operations on a library.
+"""
+import logging, os, string, shutil, urllib, re, socket
+from cgi import escape, FieldStorage
+from galaxy import util, datatypes, jobs, web, util
+from galaxy.web.base.controller import *
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.model.orm import *
+
+log = logging.getLogger( __name__ )
+
+class LibrariesController( BaseController ):
+
+ @web.expose_api
+ def index( self, trans, **kwd ):
+ """
+ GET /api/libraries
+ Displays a collection (list) of libraries.
+ """
+ query = trans.sa_session.query( trans.app.model.Library ).filter( trans.app.model.Library.table.c.deleted == False )
+ current_user_role_ids = [ role.id for role in trans.get_current_user_roles() ]
+ library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
+ restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query( trans.model.LibraryPermissions ) \
+ .filter( trans.model.LibraryPermissions.table.c.action == library_access_action ) \
+ .distinct() ]
+ accessible_restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query( trans.model.LibraryPermissions ) \
+ .filter( and_( trans.model.LibraryPermissions.table.c.action == library_access_action,
+ trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ]
+ query = query.filter( or_( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ),
+ trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) )
+ rval = []
+ for library in query:
+ item = library.get_api_value()
+ item['url'] = url_for( 'library', id=trans.security.encode_id( library.id ) )
+ item['id'] = trans.security.encode_id( item['id'] )
+ rval.append( item )
+ return rval
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/libraries/{encoded_library_id}
+ Displays information about a library.
+ """
+ library_id = id
+ params = util.Params( kwd )
+ try:
+ decoded_library_id = trans.security.decode_id( library_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed library id ( %s ) specified, unable to decode." % str( library_id )
+ try:
+ library = trans.sa_session.query( trans.app.model.Library ).get( decoded_library_id )
+ except:
+ library = None
+ if not library or not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( trans.get_current_user_roles(), library ) ):
+ trans.response.status = 400
+ return "Invalid library id ( %s ) specified." % str( library_id )
+ item = library.get_api_value( view='element' )
+ item['contents_url'] = url_for( 'contents', library_id=library_id )
+ return item
--- /dev/null
+++ b/scripts/api/common.py
@@ -0,0 +1,94 @@
+import os, sys, urllib, urllib2
+
+new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', 'lib' ) ]
+new_path.extend( sys.path[1:] )
+sys.path = new_path
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "simplejson" )
+import simplejson
+
+def make_url( api_key, url, args=None ):
+ # Adds the API Key to the URL if it's not already there.
+ if args is None:
+ args = []
+ argsep = '&'
+ if '?' not in url:
+ argsep = '?'
+ if '?key=' not in url and '&key=' not in url:
+ args.insert( 0, ( 'key', api_key ) )
+ return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
+
+def get( api_key, url ):
+ # Do the actual GET.
+ url = make_url( api_key, url )
+ return simplejson.loads( urllib2.urlopen( url ).read() )
+
+def post( api_key, url, data ):
+ # Do the actual POST.
+ url = make_url( api_key, url )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
+ return simplejson.loads( urllib2.urlopen( req ).read() )
+
+def display( api_key, url ):
+ # Sends an API GET request and acts as a generic formatter for the JSON response.
+ try:
+ r = get( api_key, url )
+ except urllib2.HTTPError, e:
+ print e
+ print e.read( 1024 ) # Only return the first 1K of errors.
+ sys.exit( 1 )
+ if type( r ) == unicode:
+ print 'error: %s' % r
+ return None
+ elif type( r ) == list:
+ # Response is a collection as defined in the REST style.
+ print 'Collection Members'
+ print '------------------'
+ for i in r:
+ # All collection members should have a name and url in the response.
+ print i.pop( 'url' )
+ print ' name: %s' % i.pop( 'name' )
+ for k, v in i.items():
+ print ' %s: %s' % ( k, v )
+ print ''
+ print '%d elements in collection' % len( r )
+ elif type( r ) == dict:
+ # Response is an element as defined in the REST style.
+ print 'Member Information'
+ print '------------------'
+ for k, v in r.items():
+ print '%s: %s' % ( k, v )
+ else:
+ print 'response is unknown type: %s' % type( r )
+
+def submit( api_key, url, data ):
+ # Sends an API POST request and acts as a generic formatter for the JSON response.
+ # 'data' will become the JSON payload read by Galaxy.
+ try:
+ r = post( api_key, url, data )
+ except urllib2.HTTPError, e:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ print 'Response'
+ print '--------'
+ if type( r ) == list:
+ # Currently the only implemented responses are lists of dicts, because
+ # submission creates some number of collection elements.
+ for i in r:
+ if type( i ) == dict:
+ if 'url' in i:
+ print i.pop( 'url' )
+ else:
+ print '----'
+ if 'name' in i:
+ print ' name: %s' % i.pop( 'name' )
+ for k, v in i.items():
+ print ' %s: %s' % ( k, v )
+ else:
+ print i
+ else:
+ print r
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -30,6 +30,7 @@ from babel.support import Translations
pkg_resources.require( "SQLAlchemy >= 0.4" )
from sqlalchemy import and_
+from sqlalchemy.orm.exc import NoResultFound
pkg_resources.require( "pexpect" )
pkg_resources.require( "amqplib" )
@@ -87,6 +88,43 @@ def require_login( verb="perform this ac
return decorator
return argcatcher
+def expose_api( func ):
+ def decorator( self, trans, *args, **kwargs ):
+ def error( environ, start_response ):
+ start_response( error_status, [('Content-type', 'text/plain')] )
+ return error_message
+ error_status = '403 Forbidden'
+ if 'key' not in kwargs:
+ error_message = 'No API key provided with request, please consult the API documentation.'
+ return error
+ try:
+ provided_key = trans.sa_session.query( trans.app.model.APIKeys ).filter( trans.app.model.APIKeys.table.c.key == kwargs['key'] ).one()
+ except NoResultFound:
+ error_message = 'Provided API key is not valid.'
+ return error
+ newest_key = provided_key.user.api_keys[0]
+ if newest_key.key != provided_key.key:
+ error_message = 'Provided API key has expired.'
+ return error
+ if trans.request.body:
+ try:
+ payload = util.recursively_stringify_dictionary_keys( simplejson.loads( trans.request.body ) )
+ kwargs['payload'] = payload
+ except ValueError:
+ error_status = '400 Bad Request'
+ error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
+ return error
+ trans.response.set_content_type( "application/json" )
+ trans.set_user( provided_key.user )
+ if trans.debug:
+ return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
+ else:
+ return simplejson.dumps( func( self, trans, *args, **kwargs ) )
+ if not hasattr(func, '_orig'):
+ decorator._orig = func
+ decorator.exposed = True
+ return decorator
+
def require_admin( func ):
def decorator( self, trans, *args, **kwargs ):
admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
@@ -119,7 +157,7 @@ def form( *args, **kwargs ):
class WebApplication( base.WebApplication ):
def __init__( self, galaxy_app, session_cookie='galaxysession' ):
base.WebApplication.__init__( self )
- self.set_transaction_factory( lambda e: UniverseWebTransaction( e, galaxy_app, self, session_cookie ) )
+ self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
# Mako support
self.mako_template_lookup = mako.lookup.TemplateLookup(
directories = [ galaxy_app.config.template_path ] ,
@@ -135,21 +173,21 @@ class WebApplication( base.WebApplicatio
if isinstance( body, FormBuilder ):
body = trans.show_form( body )
return base.WebApplication.make_body_iterable( self, trans, body )
+ def transaction_chooser( self, environ, galaxy_app, session_cookie ):
+ if 'is_api_request' in environ:
+ return GalaxyWebAPITransaction( environ, galaxy_app, self )
+ else:
+ return GalaxyWebUITransaction( environ, galaxy_app, self, session_cookie )
-class UniverseWebTransaction( base.DefaultWebTransaction ):
+class GalaxyWebTransaction( base.DefaultWebTransaction ):
"""
- Encapsulates web transaction specific state for the Universe application
+ Encapsulates web transaction specific state for the Galaxy application
(specifically the user's "cookie" session and history)
"""
- def __init__( self, environ, app, webapp, session_cookie ):
+ def __init__( self, environ, app, webapp ):
self.app = app
self.webapp = webapp
self.security = webapp.security
- # FIXME: the following 3 attributes are not currently used
- # Remove them if they are not going to be...
- self.__user = NOT_SET
- self.__history = NOT_SET
- self.__galaxy_session = NOT_SET
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
self.sa_session.expunge_all()
@@ -158,13 +196,6 @@ class UniverseWebTransaction( base.Defau
# that the current history should not be used for parameter values
# and such).
self.workflow_building_mode = False
- # Always have a valid galaxy session
- self.__ensure_valid_session( session_cookie )
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self.__ensure_logged_in_user( environ )
def setup_i18n( self ):
if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
# locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
@@ -252,12 +283,7 @@ class UniverseWebTransaction( base.Defau
tstamp = time.localtime ( time.time() + 3600 * 24 * age )
self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
self.response.cookies[name]['version'] = version
- #@property
- #def galaxy_session( self ):
- # if not self.__galaxy_session:
- # self.__ensure_valid_session()
- # return self.__galaxy_session
- def __ensure_valid_session( self, session_cookie ):
+ def _ensure_valid_session( self, session_cookie ):
"""
Ensure that a valid Galaxy session exists and is available as
trans.session (part of initialization)
@@ -344,7 +370,7 @@ class UniverseWebTransaction( base.Defau
# If the old session was invalid, get a new history with our new session
if invalidate_existing_session:
self.new_history()
- def __ensure_logged_in_user( self, environ ):
+ def _ensure_logged_in_user( self, environ ):
allowed_paths = (
url_for( controller='root', action='index' ),
url_for( controller='root', action='tool_menu' ),
@@ -537,15 +563,6 @@ class UniverseWebTransaction( base.Defau
self.sa_session.add_all( ( self.galaxy_session, history ) )
self.sa_session.flush()
return history
- def get_user( self ):
- """Return the current user if logged in or None."""
- return self.galaxy_session.user
- def set_user( self, user ):
- """Set the current user."""
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
- user = property( get_user, set_user )
def get_current_user_roles( self ):
user = self.get_user()
if user:
@@ -735,6 +752,53 @@ class FormInput( object ):
self.help = help
self.use_label = use_label
+class GalaxyWebAPITransaction( GalaxyWebTransaction ):
+ def __init__( self, environ, app, webapp ):
+ GalaxyWebTransaction.__init__( self, environ, app, webapp )
+ self.__user = None
+ self._ensure_valid_session( None )
+ def _ensure_valid_session( self, session_cookie ):
+ self.galaxy_session = Bunch()
+ self.galaxy_session.history = self.galaxy_session.current_history = Bunch()
+ self.galaxy_session.history.genome_build = None
+ self.galaxy_session.is_api = True
+ def get_user( self ):
+ """Return the current user (the expose_api decorator ensures that it is set)."""
+ return self.__user
+ def set_user( self, user ):
+ """Compatibility method"""
+ self.__user = user
+ user = property( get_user, set_user )
+ @property
+ def db_builds( self ):
+ dbnames = []
+ if 'dbkeys' in self.user.preferences:
+ user_keys = from_json_string( self.user.preferences['dbkeys'] )
+ for key, chrom_dict in user_keys.iteritems():
+ dbnames.append((key, "%s (%s) [Custom]" % (chrom_dict['name'], key) ))
+ dbnames.extend( util.dbnames )
+ return dbnames
+
+class GalaxyWebUITransaction( GalaxyWebTransaction ):
+ def __init__( self, environ, app, webapp, session_cookie ):
+ GalaxyWebTransaction.__init__( self, environ, app, webapp )
+ # Always have a valid galaxy session
+ self._ensure_valid_session( session_cookie )
+ # Prevent deleted users from accessing Galaxy
+ if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
+ self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
+ if self.app.config.require_login:
+ self._ensure_logged_in_user( environ )
+ def get_user( self ):
+ """Return the current user if logged in or None."""
+ return self.galaxy_session.user
+ def set_user( self, user ):
+ """Set the current user."""
+ self.galaxy_session.user = user
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
+ user = property( get_user, set_user )
+
class SelectInput( FormInput ):
""" A select form input. """
def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -48,6 +48,27 @@ def add_controllers( webapp, app ):
if isclass( T ) and T is not BaseController and issubclass( T, BaseController ):
webapp.add_controller( name, T( app ) )
+def add_api_controllers( webapp, app ):
+ from galaxy.web.base.controller import BaseController
+ from galaxy.web.base.controller import ControllerUnavailable
+ import galaxy.web.api
+ controller_dir = galaxy.web.api.__path__[0]
+ for fname in os.listdir( controller_dir ):
+ if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+ name = fname[:-3]
+ module_name = "galaxy.web.api." + name
+ try:
+ module = __import__( module_name )
+ except ControllerUnavailable, exc:
+ log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
+ continue
+ for comp in module_name.split( "." )[1:]:
+ module = getattr( module, comp )
+ for key in dir( module ):
+ T = getattr( module, key )
+ if isclass( T ) and T is not BaseController and issubclass( T, BaseController ):
+ webapp.add_api_controller( name, T( app ) )
+
def app_factory( global_conf, **kwargs ):
"""
Return a wsgi application serving the root object
@@ -80,6 +101,11 @@ def app_factory( global_conf, **kwargs )
webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
+ # If enabled, add the web API
+ if asbool( kwargs.get( 'enable_api', False ) ):
+ add_api_controllers( webapp, app )
+ webapp.api_mapper.resource( 'content', 'contents', path_prefix='/api/libraries/:library_id', parent_resources=dict( member_name='library', collection_name='libraries' ) )
+ webapp.api_mapper.resource( 'library', 'libraries', path_prefix='/api' )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
--- /dev/null
+++ b/scripts/api/README
@@ -0,0 +1,106 @@
+This is not documentation. These are hints and examples to get you started
+until the documentation is written.
+
+Set these options in universe_wsgi.ini and start the server:
+
+enable_api = True
+admin_users = you@example.org
+library_import_dir = /path/to/some/directory
+
+In the directory you specified for 'library_import_dir', create some
+subdirectories, and put (or symlink) files to import into Galaxy into those
+subdirectories.
+
+In Galaxy, create an account that matches the address you put in 'admin_users',
+then browse to that user's preferences and generate a new API Key. Copy the
+key to your clipboard. Create a new library (doing this via the API is not yet
+implemented). Then take your API Key and use the scripts in scripts/api/ to do
+things:
+
+% ./display.py my_key http://localhost:4096/api/libraries
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006
+ name: api_test
+ id: f3f73e481f432006
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006
+Member Information
+------------------
+synopsys: None
+contents_url: /api/libraries/f3f73e481f432006/contents
+description: API Test Library
+name: api_test
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+ name: /
+ type: folder
+ id: 28202595c0d2591f61ddda595d2c3670
+
+% ./library_create_folder.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591f61ddda595d2c3670 api_test_folder1 'API Test Folder 1'
+Response
+--------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+ name: api_test_folder1
+ id: 28202595c0d2591fa4f9089d2303fd89
+
+% ./library_upload_from_import_dir.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591fa4f9089d2303fd89 bed bed hg19
+Response
+--------
+/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+ name: 2.bed
+ id: e9ef7fdb2db87d7b
+/api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+ name: 3.bed
+ id: 3b7f6a31f80a5018
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+ name: /
+ type: folder
+ id: 28202595c0d2591f61ddda595d2c3670
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+ name: /api_test_folder1
+ type: folder
+ id: 28202595c0d2591fa4f9089d2303fd89
+/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+ name: /api_test_folder1/2.bed
+ type: file
+ id: e9ef7fdb2db87d7b
+/api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+ name: /api_test_folder1/3.bed
+ type: file
+ id: 3b7f6a31f80a5018
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+Member Information
+------------------
+misc_blurb: 68 regions
+metadata_endCol: 3
+data_type: bed
+metadata_columns: 6
+metadata_nameCol: 4
+uploaded_by: nate@...
+metadata_strandCol: 6
+name: 2.bed
+genome_build: hg19
+metadata_comment_lines: None
+metadata_startCol: 2
+metadata_chromCol: 1
+file_size: 4272
+metadata_data_lines: 68
+message:
+metadata_dbkey: hg19
+misc_info: uploaded bed file
+date_uploaded: 2010-06-22T17:01:51.266119
+metadata_column_types: str, int, int, str, int, str
+
+Other parameters are valid when uploading; they are the same parameters as are
+used in the web form, such as 'link_data_only', etc.
+
+The request and response format should be considered alpha and are subject to change.
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -45,7 +45,13 @@ class WebApplication( object ):
and `__call__` to handle a request (WSGI style).
"""
self.controllers = dict()
+ self.api_controllers = dict()
self.mapper = routes.Mapper()
+ # FIXME: The following two options are deprecated and should be
+ # removed. Consult the Routes documentation.
+ self.mapper.minimization = True
+ self.mapper.explicit = False
+ self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
def add_controller( self, controller_name, controller ):
"""
@@ -56,6 +62,10 @@ class WebApplication( object ):
log.debug( "Enabling '%s' controller, class: %s",
controller_name, controller.__class__.__name__ )
self.controllers[ controller_name ] = controller
+ def add_api_controller( self, controller_name, controller ):
+ log.debug( "Enabling '%s' API controller, class: %s",
+ controller_name, controller.__class__.__name__ )
+ self.api_controllers[ controller_name ] = controller
def add_route( self, route, **kwargs ):
"""
Add a route to match a URL with a method. Accepts all keyword
@@ -80,6 +90,7 @@ class WebApplication( object ):
"""
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
+ self.api_mapper.create_regs( self.api_controllers.keys() )
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
@@ -88,12 +99,20 @@ class WebApplication( object ):
"""
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
- map = self.mapper.match( path_info )
+ map = self.mapper.match( path_info, environ )
+ if map is None:
+ environ[ 'is_api_request' ] = True
+ map = self.api_mapper.match( path_info, environ )
+ mapper = self.api_mapper
+ controllers = self.api_controllers
+ else:
+ mapper = self.mapper
+ controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
# Setup routes
rc = routes.request_config()
- rc.mapper = self.mapper
+ rc.mapper = mapper
rc.mapper_dict = map
rc.environ = environ
# Setup the transaction
@@ -101,7 +120,7 @@ class WebApplication( object ):
rc.redirect = trans.response.send_redirect
# Get the controller class
controller_name = map.pop( 'controller', None )
- controller = self.controllers.get( controller_name, None )
+ controller = controllers.get( controller_name, None )
if controller_name is None:
raise httpexceptions.HTTPNotFound( "No controller for " + path_info )
# Resolve action method on controller
@@ -112,7 +131,7 @@ class WebApplication( object ):
if method is None:
raise httpexceptions.HTTPNotFound( "No action for " + path_info )
# Is the method exposed
- if not getattr( method, 'exposed', False ):
+ if not getattr( method, 'exposed', False ):
raise httpexceptions.HTTPNotFound( "Action not exposed for " + path_info )
# Is the method callable
if not callable( method ):
--- a/lib/galaxy/web/security/__init__.py
+++ b/lib/galaxy/web/security/__init__.py
@@ -43,6 +43,8 @@ class SecurityHelper( object ):
return self.id_cipher.encrypt( s ).encode( 'hex' )
def decode_id( self, obj_id ):
return int( self.id_cipher.decrypt( obj_id.decode( 'hex' ) ).lstrip( "!" ) )
+ def decode_string_id( self, obj_id ):
+ return self.id_cipher.decrypt( obj_id.decode( 'hex' ) ).lstrip( "!" )
def encode_guid( self, session_key ):
# Session keys are strings
# Pad to a multiple of 8 with leading "!"
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -39,6 +39,8 @@ class Configuration( object ):
self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
self.cookie_path = kwargs.get( "cookie_path", "/" )
+ # web API
+ self.enable_api = string_as_bool( kwargs.get( 'enable_api', False ) )
# dataset Track files
self.track_store_path = kwargs.get( "track_store_path", "${extra_files_path}/tracks")
self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py
@@ -0,0 +1,38 @@
+"""
+Migration script to add the api_keys table.
+"""
+
+from sqlalchemy import *
+from migrate import *
+from migrate.changeset import *
+from galaxy.model.custom_types import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+
+APIKeys_table = Table( "api_keys", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ APIKeys_table.create()
+ except Exception, e:
+ log.debug( "Creating api_keys table failed: %s" % str( e ) )
+
+def downgrade():
+ # Load existing tables
+ metadata.reflect()
+ try:
+ APIKeys_table.drop()
+ except Exception, e:
+ log.debug( "Dropping api_keys table failed: %s" % str( e ) )
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -124,7 +124,7 @@ def new_history_upload( trans, uploaded_
return hda
def new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
current_user_roles = trans.get_current_user_roles()
- if not ( cntrller in [ 'library_admin' ] or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
+ if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
# This doesn't have to be pretty - the only time this should happen is if someone's being malicious.
raise Exception( "User is not authorized to add datasets to this library." )
folder = library_bunch.folder
--- /dev/null
+++ b/lib/galaxy/web/api/contents.py
@@ -0,0 +1,154 @@
+"""
+API operations on the contents of a library.
+"""
+import logging, os, string, shutil, urllib, re, socket
+from cgi import escape, FieldStorage
+from galaxy import util, datatypes, jobs, web, util
+from galaxy.web.base.controller import *
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.model.orm import *
+
+log = logging.getLogger( __name__ )
+
+class ContentsController( BaseController ):
+
+ @web.expose_api
+ def index( self, trans, library_id, **kwd ):
+ """
+ GET /api/libraries/{encoded_library_id}/contents
+ Displays a collection (list) of library contents (files and folders).
+ """
+ rval = []
+ current_user_roles = trans.get_current_user_roles()
+ def traverse( folder ):
+ admin = trans.user_is_admin()
+ rval = []
+ for subfolder in folder.active_folders:
+ if not admin:
+ can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+ if admin or can_access:
+ subfolder.api_path = folder.api_path + '/' + subfolder.name
+ subfolder.api_type = 'folder'
+ rval.append( subfolder )
+ rval.extend( traverse( subfolder ) )
+ for ld in folder.datasets:
+ if not admin:
+ can_access = trans.app.security_agent.can_access_dataset( current_user_roles, ld.library_dataset_dataset_association.dataset )
+ if admin or can_access:
+ ld.api_path = folder.api_path + '/' + ld.name
+ ld.api_type = 'file'
+ rval.append( ld )
+ return rval
+ try:
+ decoded_library_id = trans.security.decode_id( library_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed library id ( %s ) specified, unable to decode." % str( library_id )
+ try:
+ library = trans.sa_session.query( trans.app.model.Library ).get( decoded_library_id )
+ except:
+ library = None
+ if not library or not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( current_user_roles, library ) ):
+ trans.response.status = 400
+ return "Invalid library id ( %s ) specified." % str( library_id )
+ encoded_id = trans.security.encode_id( 'folder.%s' % library.root_folder.id )
+ rval.append( dict( id = encoded_id,
+ type = 'folder',
+ name = '/',
+ url = url_for( 'content', library_id=library_id, id=encoded_id ) ) )
+ library.root_folder.api_path = ''
+ for content in traverse( library.root_folder ):
+ encoded_id = trans.security.encode_id( '%s.%s' % ( content.api_type, content.id ) )
+ rval.append( dict( id = encoded_id,
+ type = content.api_type,
+ name = content.api_path,
+ url = url_for( 'content', library_id=library_id, id=encoded_id, ) ) )
+ return rval
+
+ @web.expose_api
+ def show( self, trans, id, library_id, **kwd ):
+ """
+ GET /api/libraries/{encoded_library_id}/contents/{encoded_content_type_and_id}
+ Displays information about a library content (file or folder).
+ """
+ content_id = id
+ try:
+ decoded_type_and_id = trans.security.decode_string_id( content_id )
+ content_type, decoded_content_id = decoded_type_and_id.split( '.' )
+ except:
+ trans.response.status = 400
+ return "Malformed content id ( %s ) specified, unable to decode." % str( content_id )
+ if content_type == 'folder':
+ model_class = trans.app.model.LibraryFolder
+ elif content_type == 'file':
+ model_class = trans.app.model.LibraryDataset
+ else:
+ trans.response.status = 400
+ return "Invalid type ( %s ) specified." % str( content_type )
+ try:
+ content = trans.sa_session.query( model_class ).get( decoded_content_id )
+ except:
+ content = None
+ if not content or ( not trans.user_is_admin() and not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), content, trans.user ) ):
+ trans.response.status = 400
+ return "Invalid %s id ( %s ) specified." % ( content_type, str( content_id ) )
+ return content.get_api_value( view='element' )
+
+ @web.expose_api
+ def create( self, trans, library_id, payload, **kwd ):
+ """
+ POST /api/libraries/{encoded_library_id}/contents
+ Creates a new library content item (file or folder).
+ """
+ create_type = None
+ if 'create_type' not in payload:
+ trans.response.status = 400
+ return "Missing required 'create_type' parameter. Please consult the API documentation for help."
+ else:
+ create_type = payload.pop( 'create_type' )
+ if create_type not in ( 'file', 'folder' ):
+ trans.response.status = 400
+ return "Invalid value for 'create_type' parameter ( %s ) specified. Please consult the API documentation for help." % create_type
+ try:
+ content_id = str( payload.pop( 'folder_id' ) )
+ decoded_type_and_id = trans.security.decode_string_id( content_id )
+ parent_type, decoded_parent_id = decoded_type_and_id.split( '.' )
+ assert parent_type in ( 'folder', 'file' )
+ except:
+ trans.response.status = 400
+ return "Malformed parent id ( %s ) specified, unable to decode." % content_id
+ # "content" can be either a folder or a file, but the parent of new contents can only be folders.
+ if parent_type == 'file':
+ trans.response.status = 400
+ try:
+ # With admins or people who can access the dataset provided as the parent, be descriptive.
+ dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( decoded_parent_id ).library_dataset_dataset_association.dataset
+ assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), dataset )
+ return "The parent id ( %s ) points to a file, not a folder." % content_id
+ except:
+ # If you can't access the parent we don't want to reveal its existence.
+ return "Invalid parent folder id ( %s ) specified." % content_id
+ # The rest of the security happens in the library_common controller.
+ folder_id = trans.security.encode_id( decoded_parent_id )
+ # Now create the desired content object, either file or folder.
+ if create_type == 'file':
+ status, output = trans.webapp.controllers['library_common'].upload_library_dataset( trans, 'api', library_id, folder_id, **payload )
+ elif create_type == 'folder':
+ status, output = trans.webapp.controllers['library_common'].create_folder( trans, 'api', folder_id, library_id, **payload )
+ if status != 200:
+ trans.response.status = status
+ # We don't want to reveal the encoded folder_id since it's invalid
+ # in the API context. Instead, return the content_id originally
+ # supplied by the client.
+ output = output.replace( folder_id, content_id )
+ return output
+ else:
+ rval = []
+ for k, v in output.items():
+ if type( v ) == trans.app.model.LibraryDatasetDatasetAssociation:
+ v = v.library_dataset
+ encoded_id = trans.security.encode_id( create_type + '.' + str( v.id ) )
+ rval.append( dict( id = encoded_id,
+ name = v.name,
+ url = url_for( 'content', library_id=library_id, id=encoded_id ) ) )
+ return rval
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -829,6 +829,8 @@ class HistoryDatasetAssociationDisplayAt
class Library( object ):
permitted_actions = get_permitted_actions( filter='LIBRARY' )
+ api_collection_visible_keys = ( 'id', 'name' )
+ api_element_visible_keys = ( 'name', 'description', 'synopsys' )
def __init__( self, name=None, description=None, synopsis=None, root_folder=None ):
self.name = name or "Unnamed library"
self.description = description
@@ -870,8 +872,21 @@ class Library( object ):
if isinstance( name, str ):
name = unicode( name, 'utf-8' )
return name
+ def get_api_value( self, view='collection' ):
+ rval = {}
+ try:
+ visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
+ except AttributeError:
+ raise Exception( 'Unknown API view: %s' % view )
+ for key in visible_keys:
+ try:
+ rval[key] = self.__getattribute__( key )
+ except AttributeError:
+ rval[key] = None
+ return rval
class LibraryFolder( object ):
+ api_element_visible_keys = ( 'name', 'description', 'item_count', 'genome_build' )
def __init__( self, name=None, description=None, item_count=0, order_id=None ):
self.name = name or "Unnamed folder"
self.description = description
@@ -961,6 +976,18 @@ class LibraryFolder( object ):
if isinstance( name, str ):
name = unicode( name, 'utf-8' )
return name
+ def get_api_value( self, view='collection' ):
+ rval = {}
+ try:
+ visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
+ except AttributeError:
+ raise Exception( 'Unknown API view: %s' % view )
+ for key in visible_keys:
+ try:
+ rval[key] = self.__getattribute__( key )
+ except AttributeError:
+ rval[key] = None
+ return rval
@property
def parent_library( self ):
f = self
@@ -1011,6 +1038,28 @@ class LibraryDataset( object ):
if not purged and self.purged:
raise Exception( "Cannot unpurge once purged" )
purged = property( get_purged, set_purged )
+ def get_api_value( self, view='collection' ):
+ # Since this class is a proxy to rather complex attributes we want to
+ # display in other objects, we can't use the simpler method used by
+ # other model classes.
+ ldda = self.library_dataset_dataset_association
+ rval = dict( name = ldda.name,
+ uploaded_by = ldda.user.email,
+ message = ldda.message,
+ date_uploaded = ldda.create_time.isoformat(),
+ file_size = int( ldda.get_size() ),
+ data_type = ldda.ext,
+ genome_build = ldda.dbkey,
+ misc_info = ldda.info,
+ misc_blurb = ldda.blurb )
+ for name, spec in ldda.metadata.spec.items():
+ val = ldda.metadata.get( name )
+ if isinstance( val, MetadataFile ):
+ val = val.file_name
+ elif isinstance( val, list ):
+ val = ', '.join( val )
+ rval['metadata_' + name] = val
+ return rval
class LibraryDatasetDatasetAssociation( DatasetInstance ):
def __init__( self,
@@ -1734,6 +1783,9 @@ class UserAction( object ):
self.params = params
self.context = context
+class APIKeys( object ):
+ pass
+
## ---- Utility methods -------------------------------------------------------
def directory_hash_id( id ):
@@ -1748,5 +1800,3 @@ def directory_hash_id( id ):
padded = padded[:-3]
# Break into chunks of three
return [ padded[i*3:(i+1)*3] for i in range( len( padded ) // 3 ) ]
-
-
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -417,6 +417,14 @@ def stringify_dictionary_keys( in_dict )
out_dict[ str( key ) ] = value
return out_dict
+def recursively_stringify_dictionary_keys( d ):
+ if isinstance(d, dict):
+ return dict([(k.encode('utf-8'), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
+ elif isinstance(d, list):
+ return [recursively_stringify_dictionary_keys(x) for x in d]
+ else:
+ return d
+
def mkstemp_ln( src, prefix='mkstemp_ln_' ):
"""
From tempfile._mkstemp_inner, generate a hard link in the same dir with a
--- /dev/null
+++ b/scripts/api/library_upload_from_import_dir.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+try:
+ data = {}
+ data[ 'folder_id' ] = sys.argv[3]
+ data[ 'file_type' ] = sys.argv[4]
+ data[ 'server_dir' ] = sys.argv[5]
+ data[ 'dbkey' ] = sys.argv[6]
+ data[ 'upload_option' ] = 'upload_directory'
+ data[ 'create_type' ] = 'file'
+except IndexError:
+ print 'usage: %s key url folder_id file_type server_dir dbkey' % os.path.basename( sys.argv[0] )
+ sys.exit( 1 )
+
+submit( sys.argv[1], sys.argv[2], data )
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -889,6 +889,12 @@ UserAction.table = Table( "user_action",
Column( "context", Unicode( 512 ) ),
Column( "params", Unicode( 1024 ) ) )
+APIKeys.table = Table( "api_keys", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
@@ -1067,8 +1073,9 @@ assign_mapper( context, User, User.table
_preferences=relation( UserPreference, backref="user", collection_class=attribute_mapped_collection('name')),
# addresses=relation( UserAddress,
# primaryjoin=( User.table.c.id == UserAddress.table.c.user_id ) )
- values=relation( FormValues,
- primaryjoin=( User.table.c.form_values_id == FormValues.table.c.id ) ),
+ values=relation( FormValues,
+ primaryjoin=( User.table.c.form_values_id == FormValues.table.c.id ) ),
+ api_keys=relation( APIKeys, backref="user", order_by=desc( APIKeys.table.c.create_time ) ),
) )
# Set up proxy so that this syntax is possible:
@@ -1485,6 +1492,9 @@ assign_mapper( context, UserAction, User
properties = dict( user=relation( User.mapper ) )
)
+assign_mapper( context, APIKeys, APIKeys.table,
+ properties = {} )
+
def db_next_hid( self ):
"""
Override __next_hid to generate from the database in a concurrency
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -280,7 +280,7 @@ class LibraryCommon( BaseController ):
status = params.get( 'status', 'done' )
show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
- is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+ is_admin = trans.user_is_admin() and cntrller in ( 'library_admin', 'api' )
current_user_roles = trans.get_current_user_roles()
try:
parent_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( parent_id ) )
@@ -294,6 +294,8 @@ class LibraryCommon( BaseController ):
# its parent library, or if they are not able to see the folder's contents.
if not parent_folder or ( not is_admin and not trans.app.security_agent.can_access_library_item( current_user_roles, parent_folder, trans.user ) ):
message = "Invalid parent folder id ( %s ) specified." % str( parent_id )
+ if cntrller == 'api':
+ return 400, message
# This doesn't give away the library's existence since
# browse_library will simply punt to browse_libraries if the
# user-supplied id is invalid or inaccessible.
@@ -309,6 +311,8 @@ class LibraryCommon( BaseController ):
if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, parent_folder ) ):
message = "You are not authorized to create a folder in parent folder '%s'." % parent_folder.name
# Redirect to the real parent library since we know we have access to it.
+ if cntrller == 'api':
+ return 403, message
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -317,7 +321,7 @@ class LibraryCommon( BaseController ):
show_deleted=show_deleted,
message=util.sanitize_text( message ),
status='error' ) )
- if params.get( 'new_folder_button', False ):
+ if params.get( 'new_folder_button', False ) or cntrller == 'api':
new_folder = trans.app.model.LibraryFolder( name=util.restore_text( params.name ),
description=util.restore_text( params.description ) )
# We are associating the last used genome build with folders, so we will always
@@ -329,6 +333,9 @@ class LibraryCommon( BaseController ):
trans.sa_session.flush()
# New folders default to having the same permissions as their parent folder
trans.app.security_agent.copy_library_permissions( parent_folder, new_folder )
+ # If we're creating in the API, we're done
+ if cntrller == 'api':
+ return 200, dict( created=new_folder )
# If we have an inheritable template, redirect to the folder_info page so information
# can be filled in immediately.
widgets = []
@@ -654,7 +661,7 @@ class LibraryCommon( BaseController ):
show_deleted=show_deleted,
message=util.sanitize_text( message ),
status='error' ) )
- if ( trans.user_is_admin() and cntrller == 'library_admin' ):
+ if is_admin:
# Get all associated hdas and lddas that use the same disk file.
associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
.filter( and_( trans.model.HistoryDatasetAssociation.deleted == False,
@@ -848,7 +855,7 @@ class LibraryCommon( BaseController ):
last_used_build = dbkey[0]
else:
last_used_build = dbkey
- is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+ is_admin = trans.user_is_admin() and cntrller in ( 'library_admin', 'api' )
current_user_roles = trans.get_current_user_roles()
if replace_id not in [ None, 'None' ]:
try:
@@ -859,6 +866,8 @@ class LibraryCommon( BaseController ):
# its parent library, or if they are not able to view the dataset itself.
if not replace_dataset or ( not is_admin and not trans.app.security_agent.can_access_library_item( current_user_roles, replace_dataset, trans.user ) ):
message = "Invalid library dataset id ( %s ) to replace specified." % replace_id
+ if cntrller == 'api':
+ return 400, message
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -870,6 +879,8 @@ class LibraryCommon( BaseController ):
# Deny access if the user is not an admin and does not have the LIBRARY_MODIFY permission.
if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, replace_dataset ) ):
message = "You are not authorized to replace library dataset '%s'." % replace_dataset.name
+ if cntrller == 'api':
+ return 403, message
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -896,6 +907,8 @@ class LibraryCommon( BaseController ):
# its parent library, or if they are not able to see the folder's contents.
if not folder or ( not is_admin and not trans.app.security_agent.can_access_library_item( current_user_roles, folder, trans.user ) ):
message = "Invalid parent folder id ( %s ) specified." % str( folder_id )
+ if cntrller == 'api':
+ return 400, message
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -907,6 +920,8 @@ class LibraryCommon( BaseController ):
# Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, folder ) ):
message = "You are not authorized to create a library dataset in parent folder '%s'." % folder.name
+ if cntrller == 'api':
+ return 403, message
return trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -918,7 +933,7 @@ class LibraryCommon( BaseController ):
library = folder.parent_library
if folder and last_used_build in [ 'None', None, '?' ]:
last_used_build = folder.genome_build
- if params.get( 'runtool_btn', False ) or params.get( 'ajax_upload', False ):
+ if params.get( 'runtool_btn', False ) or params.get( 'ajax_upload', False ) or cntrller == 'api':
# Check to see if the user selected roles to associate with the DATASET_ACCESS permission
# on the dataset that would cause accessibility issues.
roles = params.get( 'roles', False )
@@ -931,7 +946,8 @@ class LibraryCommon( BaseController ):
permissions, in_roles, error, message = \
trans.app.security_agent.derive_roles_from_access( trans, library.id, cntrller, library=True, **vars )
if error:
- status = 'error'
+ if cntrller == 'api':
+ return 400, message
trans.response.send_redirect( web.url_for( controller='library_common',
action='upload_library_dataset',
cntrller=cntrller,
@@ -961,6 +977,11 @@ class LibraryCommon( BaseController ):
replace_dataset=replace_dataset,
**kwd )
if created_outputs_dict:
+ if cntrller == 'api':
+ # created_outputs_dict can only ever be a string if cntrller == 'api'
+ if type( created_outputs_dict ) == str:
+ return 400, created_outputs_dict
+ return 200, created_outputs_dict
total_added = len( created_outputs_dict.keys() )
ldda_id_list = [ str( v.id ) for k, v in created_outputs_dict.items() ]
created_ldda_ids=",".join( ldda_id_list )
@@ -1001,6 +1022,9 @@ class LibraryCommon( BaseController ):
created_ldda_ids = ''
message = "Upload failed"
status='error'
+ if cntrller == 'api':
+ return 400, message
+ response_code = 400
trans.response.send_redirect( web.url_for( controller='library_common',
action='browse_library',
cntrller=cntrller,
@@ -1068,7 +1092,6 @@ class LibraryCommon( BaseController ):
# Library-specific params
params = util.Params( kwd ) # is this filetoolparam safe?
show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
- library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
server_dir = util.restore_text( params.get( 'server_dir', '' ) )
@@ -1077,11 +1100,11 @@ class LibraryCommon( BaseController ):
else:
replace_id = None
upload_option = params.get( 'upload_option', 'upload_file' )
- err_redirect = False
+ response_code = 200
if upload_option == 'upload_directory':
if server_dir in [ None, 'None', '' ]:
- err_redirect = True
- if cntrller == 'library_admin':
+ response_code = 400
+ if cntrller == 'library_admin' or ( cntrller == 'api' and trans.user_is_admin ):
import_dir = trans.app.config.library_import_dir
import_dir_desc = 'library_import_dir'
full_dir = os.path.join( import_dir, server_dir )
@@ -1095,21 +1118,35 @@ class LibraryCommon( BaseController ):
if import_dir:
message = 'Select a directory'
else:
+ response_code = 403
message = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
- # Proceed with (mostly) regular upload processing
- precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller )
- if upload_option == 'upload_file':
- tool_params = upload_common.persist_uploads( tool_params )
- uploaded_datasets = upload_common.get_uploaded_datasets( trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
- elif upload_option == 'upload_directory':
- uploaded_datasets, err_redirect, message = self.get_server_dir_uploaded_datasets( trans, cntrller, params, full_dir, import_dir_desc, library_bunch, err_redirect, message )
elif upload_option == 'upload_paths':
- uploaded_datasets, err_redirect, message = self.get_path_paste_uploaded_datasets( trans, cntrller, params, library_bunch, err_redirect, message )
- upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
- if upload_option == 'upload_file' and not uploaded_datasets:
- message = 'Select a file, enter a URL or enter text'
- err_redirect = True
- if err_redirect:
+ if not trans.app.config.allow_library_path_paste:
+ response_code = 403
+ message = '"allow_library_path_paste" is not defined in the Galaxy configuration file'
+ # Some error handling should be added to this method.
+ try:
+ library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
+ except:
+ response_code = 500
+ message = "Unable to parse upload parameters, please report this error."
+ # Proceed with (mostly) regular upload processing if we're still errorless
+ if response_code == 200:
+ precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller )
+ if upload_option == 'upload_file':
+ tool_params = upload_common.persist_uploads( tool_params )
+ uploaded_datasets = upload_common.get_uploaded_datasets( trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
+ elif upload_option == 'upload_directory':
+ uploaded_datasets, response_code, message = self.get_server_dir_uploaded_datasets( trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message )
+ elif upload_option == 'upload_paths':
+ uploaded_datasets, response_code, message = self.get_path_paste_uploaded_datasets( trans, cntrller, params, library_bunch, response_code, message )
+ upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
+ if upload_option == 'upload_file' and not uploaded_datasets:
+ response_code = 400
+ message = 'Select a file, enter a URL or enter text'
+ if response_code != 200:
+ if cntrller == 'api':
+ return ( response_code, message )
trans.response.send_redirect( web.url_for( controller='library_common',
action='upload_library_dataset',
cntrller=cntrller,
@@ -1145,7 +1182,7 @@ class LibraryCommon( BaseController ):
trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) )
trans.sa_session.flush()
return uploaded_dataset
- def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, err_redirect, message ):
+ def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ):
files = []
try:
for entry in os.listdir( full_dir ):
@@ -1173,22 +1210,22 @@ class LibraryCommon( BaseController ):
files.append( path )
except Exception, e:
message = "Unable to get file list for configured %s, error: %s" % ( import_dir_desc, str( e ) )
- err_redirect = True
- return None, err_redirect, message
+ response_code = 500
+ return None, response_code, message
if not files:
message = "The directory '%s' contains no valid files" % full_dir
- err_redirect = True
- return None, err_redirect, message
+ response_code = 400
+ return None, response_code, message
uploaded_datasets = []
for file in files:
name = os.path.basename( file )
uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) )
- return uploaded_datasets, None, None
- def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, err_redirect, message ):
+ return uploaded_datasets, 200, None
+ def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ):
if params.get( 'filesystem_paths', '' ) == '':
message = "No paths entered in the upload form"
- err_redirect = True
- return None, err_redirect, message
+ response_code = 400
+ return None, response_code, message
preserve_dirs = True
if params.get( 'dont_preserve_dirs', False ):
preserve_dirs = False
@@ -1222,8 +1259,8 @@ class LibraryCommon( BaseController ):
in_folder ) )
if bad_paths:
message = "Invalid paths:<br><ul><li>%s</li></ul>" % "</li><li>".join( bad_paths )
- err_redirect = True
- return None, err_redirect, message
+ response_code = 400
+ return None, response_code, message
return uploaded_datasets, None, None
@web.expose
def add_history_datasets_to_library( self, trans, cntrller, library_id, folder_id, hda_ids='', **kwd ):
--- a/eggs.ini
+++ b/eggs.ini
@@ -41,7 +41,7 @@ Paste = 1.6
PasteDeploy = 1.3.3
PasteScript = 1.7.3
pexpect = 2.4
-Routes = 1.11
+Routes = 1.12.3
simplejson = 1.5
SQLAlchemy = 0.5.6
sqlalchemy_migrate = 0.5.4
--- /dev/null
+++ b/scripts/api/library_create_folder.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+try:
+ data = {}
+ data[ 'folder_id' ] = sys.argv[3]
+ data[ 'name' ] = sys.argv[4]
+ data[ 'create_type' ] = 'folder'
+except IndexError:
+ print 'usage: %s key url folder_id name [description]' % os.path.basename( sys.argv[0] )
+ sys.exit( 1 )
+try:
+ data[ 'description' ] = sys.argv[5]
+except IndexError:
+ data[ 'description' ] = ''
+
+submit( sys.argv[1], sys.argv[2], data )
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -979,3 +979,25 @@ class User( BaseController ):
lines_skipped=lines_skipped )
+ @web.expose
+ def new_api_key( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ admin_view = util.string_as_bool( params.get( 'admin_view', False ) )
+ error = ''
+ user = trans.sa_session.query( trans.app.model.User ).get( int( params.get( 'user_id', None ) ) )
+ if params.get( 'new_api_key_button', None ) == 'Generate a new key now':
+ new_key = trans.app.model.APIKeys()
+ new_key.user_id = user.id
+ new_key.key = trans.app.security.get_new_guid()
+ trans.sa_session.add( new_key )
+ trans.sa_session.flush()
+ message = "Generated a new web API key"
+ status = "done"
+ return trans.response.send_redirect( web.url_for( controller='user',
+ action='show_info',
+ admin_view=admin_view,
+ user_id=user.id,
+ message=message,
+ status=status ) )
--- a/templates/webapps/galaxy/user/info.mako
+++ b/templates/webapps/galaxy/user/info.mako
@@ -66,8 +66,9 @@
<p></p>
%endif
+<p/>
+
<div class="toolForm">
- <p></p><form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_address', user_id=user.id, admin_view=admin_view )}" method="post" ><div class="toolFormTitle">User Addresses</div><div class="toolFormBody">
@@ -118,5 +119,37 @@
</div></div></form>
- <p></p></div>
+
+<p/>
+
+%if trans.app.config.enable_api:
+ <div class="toolForm">
+ <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_api_key', user_id=user.id, admin_view=admin_view )}" method="post" >
+ <div class="toolFormTitle">Web API Key</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <label>Current API key:</label>
+ %if user.api_keys:
+ ${user.api_keys[0].key}
+ %else:
+ none set
+ %endif
+ </div>
+ <div class="form-row">
+ <input type="submit" name="new_api_key_button" value="Generate a new key now"/>
+ %if user.api_keys:
+ (invalidates old key)
+ %endif
+ <div class="toolParamHelp" style="clear: both;">
+ An API key will allow you to access Galaxy via its web
+ API (documentation forthcoming). Please note that
+ <strong>this key acts as an alternate means to access
+ your account, and should be treated with the same care
+ as your login password</strong>.
+ </div>
+ </div>
+ </div>
+ </form>
+ </div>
+%endif
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -2,6 +2,6 @@
The Galaxy web application.
"""
-from framework import expose, json, json_pretty, require_login, require_admin, url_for, error, form, FormBuilder
+from framework import expose, json, json_pretty, require_login, require_admin, url_for, error, form, FormBuilder, expose_api
from framework.base import httpexceptions
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -112,6 +112,9 @@ gbrowse_display_sites = wormbase,tair,mo
# Define your GeneTrack servers in tool-data/shared/genetrack/genetrack_sites.txt
#genetrack_display_sites =
+# Enable the (experimental! beta!) Web API. Documentation forthcoming.
+#enable_api = False
+
# Serving static files (needed if running standalone)
static_enabled = True
static_cache_time = 360
--- /dev/null
+++ b/scripts/api/display.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import display
+
+try:
+ display( *sys.argv[1:3] )
+except TypeError:
+ print 'usage: %s key url' % os.path.basename( sys.argv[0] )
+ sys.exit( 1 )
1
0
galaxy-dist commit b4237641ca4d: Make the PSU BX browser a UCSC browser instead of being a different display type.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1277829642 14400
# Node ID b4237641ca4d28631c9e65c726693057d4b8dab3
# Parent eea2c040ccb7bd0d2a7c6e5eab8e65f98a163009
Make the PSU BX browser a UCSC browser instead of being a different display type.
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -105,8 +105,7 @@ mailing_join_addr = galaxy-user-join@bx.
# Use the new iframe / javascript based layout
use_new_layout = true
-# Comma separated list of bx / UCSC / gbrowse / GeneTrack browsers to use for viewing
-bx_display_sites = main
+# Comma separated list of UCSC / gbrowse / GeneTrack browsers to use for viewing
ucsc_display_sites = main,test,archaea,ucla
gbrowse_display_sites = wormbase,tair,modencode_worm,modencode_fly
# Define your GeneTrack servers in tool-data/shared/genetrack/genetrack_sites.txt
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -278,12 +278,6 @@ def object_to_string( obj ):
def string_to_object( s ):
return pickle.loads( binascii.unhexlify( s ) )
-def get_bx_by_build(build):
- sites = []
- for site in bx_build_sites:
- if build in site['builds']:
- sites.append((site['name'],site['url']))
- return sites
def get_ucsc_by_build(build):
sites = []
for site in ucsc_build_sites:
@@ -484,7 +478,6 @@ def umask_fix_perms( path, umask, unmask
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
# The dbnames list is used in edit attributes and the upload tool
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) )
-bx_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "bx", "bx_build_sites.txt" ) )
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) )
gbrowse_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "gbrowse", "gbrowse_build_sites.txt" ) )
genetrack_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "genetrack", "genetrack_sites.txt" ), check_builds=False )
--- a/tool-data/shared/bx/bx_build_sites.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9 bx-main
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -54,7 +54,6 @@ class Interval( Tabular ):
"""Initialize interval datatype, by adding UCSC display apps"""
Tabular.__init__(self, **kwd)
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
- self.add_display_app ( 'main', 'BX', 'as_ucsc_display_file', 'bx_links' )
def init_meta( self, dataset, copy_from=None ):
Tabular.init_meta( self, dataset, copy_from=copy_from )
@@ -247,29 +246,6 @@ class Interval( Tabular ):
link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
ret_val.append( (site_name, link) )
return ret_val
- def bx_links( self, dataset, type, app, base_url ):
- # TODO: abstract these methods to eliminate duplicate code.
- ret_val = []
- if dataset.has_data:
- viewport_tuple = self.get_estimated_display_viewport(dataset)
- if viewport_tuple:
- chrom = viewport_tuple[0]
- start = viewport_tuple[1]
- stop = viewport_tuple[2]
- for site_name, site_url in util.get_bx_by_build(dataset.dbkey):
- if site_name in app.config.bx_display_sites:
- # HACK: UCSC doesn't support https, so force http even
- # if our URL scheme is https. Making this work
- # requires additional hackery in your upstream proxy.
- # If UCSC ever supports https, remove this hack.
- internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='bx_' + site_name )
- if base_url.startswith( 'https://' ):
- base_url = base_url.replace( 'https', 'http', 1 )
- display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
- redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
- return ret_val
def validate( self, dataset ):
"""Validate an interval file using the bx GenomicIntervalReader"""
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -5,3 +5,4 @@ archaea http://archaea.ucsc.edu/cgi-bin/
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
test http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks? anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg17,monDom1,monDom4,droMoj1,petMar1,droMoj2,vibrChol_MO10_1,vibrPara1,gliRes13,vibrVuln_YJ016_1,braFlo1,cioSav1,lauRas13,dm1,canFam1,canFam2,ci1,echTel1,ci2,caePb1,dm3,ponAbe2,falciparum,xenTro1,xenTro2,nonAfr13,fr2,fr1,gasAcu1,dm2,apiMel1,apiMel2,eschColi_O157H7EDL933_1,priPac1,panTro1,hg18,panTro2,campJeju_RM1221_1,canHg12,vibrChol_O395_1,vibrFisc_ES114_1,danRer5,danRer4,danRer3,danRer2,danRer1,tetNig1,afrOth13,bosTau1,eschColi_CFT073_1,bosTau3,bosTau2,bosTau4,rodEnt13,droEre1,priMat13,vibrVu
ln_CMCP6_1,cb2,cb3,cb1,borEut13,droSec1,felCat3,strPur1,strPur2,otoGar1,catArr1,anoGam1,triCas2
ucla http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks? araTha1
+psu bx main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -83,7 +83,6 @@ class Configuration( object ):
self.log_memory_usage = string_as_bool( kwargs.get( 'log_memory_usage', 'False' ) )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
- self.bx_display_sites = kwargs.get( 'bx_display_sites', "main" ).lower().split(",")
self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "wormbase,tair,modencode_worm,modencode_fly" ).lower().split(",")
self.genetrack_display_sites = kwargs.get( 'genetrack_display_sites', "main,test" ).lower().split(",")
1
0
galaxy-dist commit 91d047183110: Fix bug that prevented correct toggling of recently used menu.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User jeremy goecks <jeremy.goecks(a)emory.edu>
# Date 1277147258 14400
# Node ID 91d047183110888b3bbcbfc99a0b5f2c38c2d6bf
# Parent 4cdf4cca0f313c5c3a6045db287a32a3d1e4d30b
Fix bug that prevented correct toggling of recently used menu.
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -124,10 +124,7 @@
else
{
// "Show" menu.
- var tool_search_query = $('#galaxy_tools').contents().find('#tool-search-query');
- var tool_search_active = tool_search_query.is(":visible") && tool_search_query.val().length > 3;
-
- if (!tool_search_active)
+ if (!$('#galaxy_tools').contents().find('#tool-search-query').hasClass("search_active"))
// Default.
ru_menu.slideDown();
else
--- a/templates/root/tool_menu.mako
+++ b/templates/root/tool_menu.mako
@@ -113,6 +113,8 @@
if ( this.value.length < 3 ) {
reset_tool_search(false);
} else if ( this.value != this.lastValue ) {
+ // Add class to denote that searching is active.
+ $(this).addClass("search_active");
// input.addClass(config.loadingClass);
// Add '*' to facilitate partial matching.
var q = this.value + '*';
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -478,7 +478,8 @@ function reset_tool_search( initValue )
if (tool_menu_frame.length == 0)
tool_menu_frame = $(document);
- // Remove matching class.
+ // Remove classes that indicate searching is active.
+ $(this).removeClass("search_active");
tool_menu_frame.find(".toolTitle").removeClass("search_match");
// Reset visibility of tools and labels.
--- a/static/scripts/packed/galaxy.base.js
+++ b/static/scripts/packed/galaxy.base.js
@@ -1,1 +1,1 @@
-$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function ensure_popup_helper(){if($("#popup-helper").length===0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function attach_popupmenu(b,d){var a=function(){d.unbind().hide();$("#popup-helper").unbind("click.popupmenu").hide()};var c=function(g){$("#popup-helper").bind("click.popupmenu",a).show();d.click(a).css({left:0,top:-1000}).show();var f=g.pageX-d.width()/2;f=Math.min(f,$(document).scrollLeft()+$(window).width()-$(d).width()-20);f=Math.max(f,$(document).scrollLeft()+20);d.css({top:g.pageY-5,left:f});return false};$(b).click(c)}function make_popupmenu(c,b){ensure_popup_helper();var a=$("<ul id='"+c.attr("id")
+"-menu'></ul>");$.each(b,function(f,e){if(e){$("<li/>").html(f).click(e).appendTo(a)}else{$("<li class='head'/>").html(f).appendTo(a)}});var d=$("<div class='popmenu-wrapper'>");d.append(a).append("<div class='overlay-border'>").css("position","absolute").appendTo("body").hide();attach_popupmenu(c,d)}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}else{if(e=="_top"){g=window.top}}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.addClass("popup").show()})}function array_length(b){if(b.length){return b.length}var c=0;for(var a in b){c++}return c}function naturalSort(i,g){var n=/(-?[0-9\.]+)/g,j=i.toString().toLowerCase()||"",f=g.toString().toLowerCase()||"",k=String.fromCharCode(0),l=j.replace(n,k+"
$1"+k).split(k),e=f.replace(n,k+"$1"+k).split(k),d=(new Date(j)).getTime(),m=d?(new Date(f)).getTime():null;if(m){if(d<m){return -1}else{if(d>m){return 1}}}for(var h=0,c=Math.max(l.length,e.length);h<c;h++){oFxNcL=parseFloat(l[h])||l[h];oFyNcL=parseFloat(e[h])||e[h];if(oFxNcL<oFyNcL){return -1}else{if(oFxNcL>oFyNcL){return 1}}}return 0}function replace_big_select_inputs(a,b){if(!jQuery().autocomplete){return}if(a===undefined){a=20}if(b===undefined){b=3000}$("select").each(function(){var e=$(this);var h=e.find("option").length;if((h<a)||(h>b)){return}if(e.attr("multiple")==true){return}var l=e.attr("value");var c=$("<input type='text' class='text-and-autocomplete-select'></input>");c.attr("size",40);c.attr("name",e.attr("name"));c.attr("id",e.attr("id"));c.click(function(){var m=$(this).val();$(this).val("Loading...");$(this).showAllInCache();$(this).val(m);$(this).select()});var f=[];var i={};e.children("option").each(function(){var n=$(this).text();var m=$(this).attr("value
");f.push(n);i[n]=m;i[m]=m;if(m==l){c.attr("value",n)}});if(l==""||l=="?"){c.attr("value","Click to Search or Select")}if(e.attr("name")=="dbkey"){f=f.sort(naturalSort)}var g={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:b,minChars:0,hideForLessThanMinChars:false};c.autocomplete(f,g);e.replaceWith(c);var k=function(){var n=c.attr("value");var m=i[n];if(m!==null&&m!==undefined){c.attr("value",m)}else{if(l!=""){c.attr("value",l)}else{c.attr("value","?")}}};c.parents("form").submit(function(){k()});$(document).bind("convert_dbkeys",function(){k()});if(e.attr("refresh_on_change")=="true"){var d=e.attr("refresh_on_change_values");if(d!==undefined){d=d.split(",")}var j=function(){var o=c.attr("value");var n=i[o];if(n!==null&&n!==undefined){refresh=false;if(d!==undefined){for(var m=0;m<d.length;m++){if(n==d[m]){refresh=true;break}}}else{refresh=true}if(refresh){c.attr("value",n);c.parents("form").submit()}}};c.bind("result",j);c.keyup(function(m){if(m.key
Code===13){j()}});c.keydown(function(m){if(m.keyCode===13){return false}})}})}function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text(k)}else{j=$("<input type='text'></input>").attr({value:k,size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){l.text(o);if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStore.store("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}c
atch(f){$.jStore.remove("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id;var h=$(this).children("div.historyItemBody");var i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){var k=$.jStore.store("history_expand_state");if(k){delete k[j];$.jStore.store("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){var k=$.jStore.store("history_expand_state");if(k===undefined){k={}}k[j]=true;$.jStore.store("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStore.store("history_expand_state");if(h===undefined){h={}}$("div.historyItemBody:visible").ea
ch(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStore.store("history_expand_state",h)}).show()};if(a){b()}else{$.jStore.init("galaxy");$.jStore.engineReady(function(){b()})}}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length==0){c=$(document)}c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools");b.css("font-style","italic")}}function GalaxyAsync(a){this
.url_dict={};this.log_action=(a===undefined?false:a)}GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus();replace_big_select_inputs(20,1500)});
+$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function ensure_popup_helper(){if($("#popup-helper").length===0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function attach_popupmenu(b,d){var a=function(){d.unbind().hide();$("#popup-helper").unbind("click.popupmenu").hide()};var c=function(g){$("#popup-helper").bind("click.popupmenu",a).show();d.click(a).css({left:0,top:-1000}).show();var f=g.pageX-d.width()/2;f=Math.min(f,$(document).scrollLeft()+$(window).width()-$(d).width()-20);f=Math.max(f,$(document).scrollLeft()+20);d.css({top:g.pageY-5,left:f});return false};$(b).click(c)}function make_popupmenu(c,b){ensure_popup_helper();var a=$("<ul id='"+c.attr("id")
+"-menu'></ul>");$.each(b,function(f,e){if(e){$("<li/>").html(f).click(e).appendTo(a)}else{$("<li class='head'/>").html(f).appendTo(a)}});var d=$("<div class='popmenu-wrapper'>");d.append(a).append("<div class='overlay-border'>").css("position","absolute").appendTo("body").hide();attach_popupmenu(c,d)}function make_popup_menus(){jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}else{if(e=="_top"){g=window.top}}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.addClass("popup").show()})}function array_length(b){if(b.length){return b.length}var c=0;for(var a in b){c++}return c}function naturalSort(i,g){var n=/(-?[0-9\.]+)/g,j=i.toString().toLowerCase()||"",f=g.toString().toLowerCase()||"",k=String.fromCharCode(0),l=j.replace(n,k+"
$1"+k).split(k),e=f.replace(n,k+"$1"+k).split(k),d=(new Date(j)).getTime(),m=d?(new Date(f)).getTime():null;if(m){if(d<m){return -1}else{if(d>m){return 1}}}for(var h=0,c=Math.max(l.length,e.length);h<c;h++){oFxNcL=parseFloat(l[h])||l[h];oFyNcL=parseFloat(e[h])||e[h];if(oFxNcL<oFyNcL){return -1}else{if(oFxNcL>oFyNcL){return 1}}}return 0}function replace_big_select_inputs(a,b){if(!jQuery().autocomplete){return}if(a===undefined){a=20}if(b===undefined){b=3000}$("select").each(function(){var e=$(this);var h=e.find("option").length;if((h<a)||(h>b)){return}if(e.attr("multiple")==true){return}var l=e.attr("value");var c=$("<input type='text' class='text-and-autocomplete-select'></input>");c.attr("size",40);c.attr("name",e.attr("name"));c.attr("id",e.attr("id"));c.click(function(){var m=$(this).val();$(this).val("Loading...");$(this).showAllInCache();$(this).val(m);$(this).select()});var f=[];var i={};e.children("option").each(function(){var n=$(this).text();var m=$(this).attr("value
");f.push(n);i[n]=m;i[m]=m;if(m==l){c.attr("value",n)}});if(l==""||l=="?"){c.attr("value","Click to Search or Select")}if(e.attr("name")=="dbkey"){f=f.sort(naturalSort)}var g={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:b,minChars:0,hideForLessThanMinChars:false};c.autocomplete(f,g);e.replaceWith(c);var k=function(){var n=c.attr("value");var m=i[n];if(m!==null&&m!==undefined){c.attr("value",m)}else{if(l!=""){c.attr("value",l)}else{c.attr("value","?")}}};c.parents("form").submit(function(){k()});$(document).bind("convert_dbkeys",function(){k()});if(e.attr("refresh_on_change")=="true"){var d=e.attr("refresh_on_change_values");if(d!==undefined){d=d.split(",")}var j=function(){var o=c.attr("value");var n=i[o];if(n!==null&&n!==undefined){refresh=false;if(d!==undefined){for(var m=0;m<d.length;m++){if(n==d[m]){refresh=true;break}}}else{refresh=true}if(refresh){c.attr("value",n);c.parents("form").submit()}}};c.bind("result",j);c.keyup(function(m){if(m.key
Code===13){j()}});c.keydown(function(m){if(m.keyCode===13){return false}})}})}function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text(k)}else{j=$("<input type='text'></input>").attr({value:k,size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){l.text(o);if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStore.store("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}c
atch(f){$.jStore.remove("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id;var h=$(this).children("div.historyItemBody");var i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){var k=$.jStore.store("history_expand_state");if(k){delete k[j];$.jStore.store("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){var k=$.jStore.store("history_expand_state");if(k===undefined){k={}}k[j]=true;$.jStore.store("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStore.store("history_expand_state");if(h===undefined){h={}}$("div.historyItemBody:visible").ea
ch(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStore.store("history_expand_state",h)}).show()};if(a){b()}else{$.jStore.init("galaxy");$.jStore.engineReady(function(){b()})}}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length==0){c=$(document)}$(this).removeClass("search_active");c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools");b.css("font-style","i
talic")}}function GalaxyAsync(a){this.url_dict={};this.log_action=(a===undefined?false:a)}GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tipsy){$(".tooltip").tipsy({gravity:"s"})}make_popup_menus();replace_big_select_inputs(20,1500)});
1
0
galaxy-dist commit 072d5f7ba872: Remove the rest of the memory_usage debugging module since all of its calls were removed from controller methods.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Nate Coraor <nate(a)bx.psu.edu>
# Date 1277831187 14400
# Node ID 072d5f7ba872c828fc21f6ddd9e8485deded89d3
# Parent b4237641ca4d28631c9e65c726693057d4b8dab3
Remove the rest of the memory_usage debugging module since all of its calls were removed from controller methods.
--- a/lib/galaxy/util/memory_usage.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import os, platform, logging
-from galaxy.datatypes.data import nice_size
-
-log = logging.getLogger( __name__ )
-
-_proc_status = '/proc/%d/status' % os.getpid()
-
-_scale = { 'kB': 1024.0, 'mB': 1024.0*1024.0, 'KB': 1024.0, 'MB': 1024.0*1024.0 }
-
-def _VmB( VmKey ):
- '''Private.
- '''
- global _proc_status, _scale
- # get pseudo file /proc/<pid>/status
- try:
- t = open( _proc_status )
- v = t.read()
- t.close()
- except:
- log.debug("memory_usage is currently supported only on Linux, your platform is %s %s" % ( platform.system(), platform.release() ) )
- return 0.0 # non-Linux?
- # get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
- i = v.index( VmKey )
- v = v[ i: ].split( None, 3 ) # whitespace
- if len( v ) < 3:
- return 0.0 # invalid format?
- # convert Vm value to bytes
- return float( v[ 1 ] ) * _scale[ v[ 2 ] ]
-def memory( since=0.0, pretty=False ):
- '''Return memory usage in bytes.
- '''
- size = _VmB( 'VmSize:' ) - since
- if pretty:
- return nice_size( size )
- else:
- return size
-def resident( since=0.0 ):
- '''Return resident memory usage in bytes.
- '''
- return _VmB( 'VmRSS:' ) - since
-def stacksize( since=0.0 ):
- '''Return stack size in bytes.
- '''
- return _VmB( 'VmStk:' ) - since
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -64,10 +64,6 @@ class UniverseApplication( object ):
from galaxy.util import memdump
if memdump.Memdump:
self.memdump = memdump.Memdump()
- # Enable memory_usage logging if configured
- if self.config.log_memory_usage:
- from galaxy.util import memory_usage
- self.memory_usage = memory_usage
# Start the job queue
self.job_manager = jobs.JobManager( self )
# FIXME: These are exposed directly for backward compatibility
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -80,7 +80,6 @@ class Configuration( object ):
self.pbs_stage_path = kwargs.get('pbs_stage_path', "" )
self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
self.use_memdump = string_as_bool( kwargs.get( 'use_memdump', 'False' ) )
- self.log_memory_usage = string_as_bool( kwargs.get( 'log_memory_usage', 'False' ) )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
1
0
galaxy-dist commit 7d3a9212b15e: Enable recently used tools menu to show only tools that are currently in the toolbox.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User jeremy goecks <jeremy.goecks(a)emory.edu>
# Date 1277147965 14400
# Node ID 7d3a9212b15e82327c2af7c3222995f358532d60
# Parent 91d047183110888b3bbcbfc99a0b5f2c38c2d6bf
Enable recently used tools menu to show only tools that are currently in the toolbox.
--- a/lib/galaxy/web/controllers/root.py
+++ b/lib/galaxy/web/controllers/root.py
@@ -6,7 +6,6 @@ from cgi import escape, FieldStorage
from galaxy import util, datatypes, jobs, web, util
from galaxy.web.base.controller import *
from galaxy.util.sanitize_html import sanitize_html
-from galaxy.util.odict import odict
from galaxy.model.orm import *
log = logging.getLogger( __name__ )
@@ -33,20 +32,18 @@ class RootController( BaseController, Us
else:
## Get most recently used tools.
toolbox = self.get_toolbox()
- recent_tool_ids = odict()
+ recent_tools = []
if trans.user:
for row in trans.sa_session.query( self.app.model.Job.tool_id ). \
filter( self.app.model.Job.user==trans.user ). \
order_by( self.app.model.Job.create_time.desc() ):
tool_id = row[0]
- recent_tool_ids[tool_id] = tool_id
- ## TODO: make number of recently used tools a user preference.
- if len ( recent_tool_ids ) == 5:
- break
-
- # Create list of recently used tools.
- recent_tools = [ toolbox.tools_by_id[ tool_id ] for tool_id in recent_tool_ids.keys() ]
-
+ if tool_id in toolbox.tools_by_id:
+ recent_tools.append( toolbox.tools_by_id[tool_id] )
+ ## TODO: make number of recently used tools a user preference.
+ if len ( recent_tools ) == 5:
+ break
+
return trans.fill_template('/root/tool_menu.mako', toolbox=toolbox, recent_tools=recent_tools )
@web.json
1
0
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Dannon Baker <dannon.baker(a)emory.edu>
# Date 1277490919 14400
# Node ID 71b1a5920fc12514a9e151770eaf250d6fc6da46
# Parent c02ecbcc8dab54e7007b35db5a05e31211d77b01
# Parent d666e89f178eddb4175c388e81d8758096de72df
Merge.
1
0
galaxy-dist commit c37eed98f8c9: Update UCSC table browser datasource tools to treat data marked as 'gff' as 'gtf'.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User Dan Blankenberg <dan(a)bx.psu.edu>
# Date 1277740225 14400
# Node ID c37eed98f8c974d7591db0b845a515c731d00787
# Parent 980a38b7f8748e5a852ff06d9cdf59be810f97f7
Update UCSC table browser datasource tools to treat data marked as 'gff' as 'gtf'.
--- a/tools/data_source/ucsc_tablebrowser_archaea.xml
+++ b/tools/data_source/ucsc_tablebrowser_archaea.xml
@@ -30,6 +30,7 @@
<value galaxy_value="interval" remote_value="tab" /><value galaxy_value="html" remote_value="hyperlinks" /><value galaxy_value="fasta" remote_value="sequence" />
+ <value galaxy_value="gtf" remote_value="gff" /></value_translation></request_param></request_param_translation>
--- a/tools/data_source/ucsc_tablebrowser.xml
+++ b/tools/data_source/ucsc_tablebrowser.xml
@@ -30,6 +30,7 @@
<value galaxy_value="interval" remote_value="tab" /><value galaxy_value="html" remote_value="hyperlinks" /><value galaxy_value="fasta" remote_value="sequence" />
+ <value galaxy_value="gtf" remote_value="gff" /></value_translation></request_param></request_param_translation>
--- a/tools/data_source/ucsc_tablebrowser_test.xml
+++ b/tools/data_source/ucsc_tablebrowser_test.xml
@@ -30,6 +30,7 @@
<value galaxy_value="interval" remote_value="tab" /><value galaxy_value="html" remote_value="hyperlinks" /><value galaxy_value="fasta" remote_value="sequence" />
+ <value galaxy_value="gtf" remote_value="gff" /></value_translation></request_param></request_param_translation>
1
0
galaxy-dist commit 89fbba1fc87e: Include annotations and tags when exporting histories to and importing histories from file.
by commits-noreply@bitbucket.org 29 Jun '10
by commits-noreply@bitbucket.org 29 Jun '10
29 Jun '10
# HG changeset patch -- Bitbucket.org
# Project galaxy-dist
# URL http://bitbucket.org/galaxy/galaxy-dist/overview
# User jeremy goecks <jeremy.goecks(a)emory.edu>
# Date 1277733095 14400
# Node ID 89fbba1fc87e5dc59e449bab7a38addd24493337
# Parent 71b1a5920fc12514a9e151770eaf250d6fc6da46
Include annotations and tags when exporting histories to and importing histories from file.
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -476,12 +476,24 @@ class HistoryController( BaseController,
history_attrs = from_json_string( history_attr_str )
# Create history.
- # TODO: set tags, annotations.
- new_history = model.History( name='imported: %s' % history_attrs['name'].encode( 'utf-8' ), user=trans.user )
+ new_history = model.History( name='imported from archive: %s' % history_attrs['name'].encode( 'utf-8' ), user=trans.user )
trans.sa_session.add( new_history )
- trans.sa_session.flush()
- # TODO: Ignore hid_counter for now since it just artificially increases the hid for all the history's HDAs.
- #new_history.hid_counter = history_attrs['hid_counter']
+
+ # Builds a tag string for a tag, value pair.
+ def get_tag_str( tag, value ):
+ if not value:
+ return tag
+ else:
+ return tag + ":" + value
+
+ # Add annotation, tags.
+ if trans.user:
+ self.add_item_annotation( trans, new_history, history_attrs[ 'annotation' ] )
+ for tag, value in history_attrs[ 'tags' ].items():
+ trans.app.tag_handler.apply_item_tags( trans, trans.user, new_history, get_tag_str( tag, value ) )
+
+ # Ignore hid_counter since it artificially increases the hid for all HDAs?
+ # new_history.hid_counter = history_attrs['hid_counter']
new_history.genome_build = history_attrs['genome_build']
trans.sa_session.flush()
@@ -515,7 +527,7 @@ class HistoryController( BaseController,
metadata = metadata,
history = new_history,
create_dataset = True,
- sa_session = trans.sa_session )
+ sa_session = trans.sa_session )
hda.state = hda.states.OK
trans.sa_session.add( hda )
trans.sa_session.flush()
@@ -528,7 +540,12 @@ class HistoryController( BaseController,
temp_dataset_name = '%s/datasets/%s' % ( temp_output_dir, dataset_attrs['file_name'] )
shutil.copyfile( temp_dataset_name, hda.file_name )
- # TODO: set tags, annotations.
+ # Set tags, annotations.
+ if trans.user:
+ self.add_item_annotation( trans, hda, dataset_attrs[ 'annotation' ] )
+ for tag, value in dataset_attrs[ 'tags' ].items():
+ trans.app.tag_handler.apply_item_tags( trans, trans.user, hda, get_tag_str( tag, value ) )
+ trans.sa_session.flush()
# Cleanup.
if os.path.exists( temp_output_dir ):
@@ -537,7 +554,7 @@ class HistoryController( BaseController,
return trans.show_ok_message( message="History '%s' has been imported. " % history_attrs['name'] )
except Exception, e:
return trans.show_error_message( 'Error importing history archive. ' + str( e ) )
-
+
return trans.show_form(
web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" )
@@ -567,7 +584,7 @@ class HistoryController( BaseController,
# Create archive and stream back to client.
- # Simple method to convert strings to unicode in utf-8 format. Method should be used for all user input.
+ # Convert strings to unicode in utf-8 format. Method should be used for all user input.
def unicode_wrangler( a_string ):
a_string_type = type ( a_string )
if a_string_type is str:
@@ -575,18 +592,28 @@ class HistoryController( BaseController,
elif a_string_type is unicode:
return a_string.encode( 'utf-8' )
+ # Create dictionary of an item's tags.
+ def get_item_tag_dict( item ):
+ tags = {}
+ for tag in item.tags:
+ tag_user_tname = unicode_wrangler( tag.user_tname )
+ tag_user_value = unicode_wrangler( tag.user_value )
+ tags[ tag_user_tname ] = tag_user_value
+ return tags
+
try:
# Use temporary directory for temp output files.
temp_output_dir = tempfile.mkdtemp()
# Write history attributes to file.
- # TODO: include tags, annotations.
history_attrs = {
"create_time" : history.create_time.__str__(),
"update_time" : history.update_time.__str__(),
"name" : unicode_wrangler( history.name ),
"hid_counter" : history.hid_counter,
- "genome_build" : history.genome_build
+ "genome_build" : history.genome_build,
+ "annotation" : unicode_wrangler( self.get_item_annotation_str( trans, history.user, history ) ),
+ "tags" : get_item_tag_dict( history )
}
history_attrs_file_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
history_attrs_out = open( history_attrs_file_name, 'w' )
@@ -597,7 +624,6 @@ class HistoryController( BaseController,
history_attrs_file_name = new_name
# Write datasets' attributes to file.
- # TODO: include tags, annotations.
datasets = self.get_history_datasets( trans, history )
datasets_attrs = []
for dataset in datasets:
@@ -615,7 +641,9 @@ class HistoryController( BaseController,
"designation" : dataset.designation,
"deleted" : dataset.deleted,
"visible" : dataset.visible,
- "file_name" : dataset.file_name.split('/')[-1]
+ "file_name" : dataset.file_name.split('/')[-1],
+ "annotation" : unicode_wrangler( self.get_item_annotation_str( trans, history.user, dataset ) ),
+ "tags" : get_item_tag_dict( dataset )
}
datasets_attrs.append( attribute_dict )
datasets_attrs_file_name = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
1
0