galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
August 2012
- 1 participant
- 118 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/af13620fe935/
changeset: af13620fe935
user: dan
date: 2012-08-29 19:50:31
summary: Have form_builder class's get_html methods return unicode.
affected #: 1 file
diff -r 068a043f6bb17b4faa3e171ca244029fdd66bbfe -r af13620fe9353bf8cfc243e0a2981a949b284523 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -4,7 +4,7 @@
import logging, sys, os, time
from cgi import escape
-from galaxy.util import restore_text, relpath, nice_size
+from galaxy.util import restore_text, relpath, nice_size, unicodify
from galaxy.web import url_for
from binascii import hexlify
@@ -34,8 +34,8 @@
self.size = int( size or 10 )
self.value = value or ""
def get_html( self, prefix="", disabled=False ):
- return '<input type="text" name="%s%s" size="%d" value="%s"%s>' \
- % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) )
+ return unicodify( '<input type="text" name="%s%s" size="%d" value="%s"%s>' \
+ % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) ) )
def set_size(self, size):
self.size = int( size )
@@ -53,8 +53,8 @@
self.size = int( size or 10 )
self.value = value or ""
def get_html( self, prefix="", disabled=False ):
- return '<input type="password" name="%s%s" size="%d" value="%s"%s>' \
- % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) )
+ return unicodify( '<input type="password" name="%s%s" size="%d" value="%s"%s>' \
+ % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) ) )
def set_size(self, size):
self.size = int( size )
@@ -74,8 +74,8 @@
self.cols = int(self.size[-1])
self.value = value or ""
def get_html( self, prefix="", disabled=False ):
- return '<textarea name="%s%s" rows="%d" cols="%d"%s>%s</textarea>' \
- % ( prefix, self.name, self.rows, self.cols, self.get_disabled_str( disabled ), escape( str( self.value ), quote=True ) )
+ return unicodify( '<textarea name="%s%s" rows="%d" cols="%d"%s>%s</textarea>' \
+ % ( prefix, self.name, self.rows, self.cols, self.get_disabled_str( disabled ), escape( str( self.value ), quote=True ) ) )
def set_size(self, rows, cols):
self.rows = rows
self.cols = cols
@@ -111,8 +111,8 @@
# parsing the request, the value 'true' in the hidden field actually means it is NOT checked.
# See the is_checked() method below. The prefix is necessary in each case to ensure functional
# correctness when the param is inside a conditional.
- return '<input type="checkbox" id="%s" name="%s" value="true"%s%s%s><input type="hidden" name="%s%s" value="true"%s>' \
- % ( id_name, id_name, checked_text, self.get_disabled_str( disabled ), self.refresh_on_change_text, prefix, self.name, self.get_disabled_str( disabled ) )
+ return unicodify( '<input type="checkbox" id="%s" name="%s" value="true"%s%s%s><input type="hidden" name="%s%s" value="true"%s>' \
+ % ( id_name, id_name, checked_text, self.get_disabled_str( disabled ), self.refresh_on_change_text, prefix, self.name, self.get_disabled_str( disabled ) ) )
@staticmethod
def is_checked( value ):
if value == True:
@@ -148,7 +148,7 @@
ajax_text = ""
if self.ajax:
ajax_text = ' galaxy-ajax-upload="true"'
- return '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text )
+ return unicodify( '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text ) )
class FTPFileField(BaseField):
"""
@@ -223,7 +223,7 @@
self.name = name
self.value = value or ""
def get_html( self, prefix="" ):
- return '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, escape( str( self.value ), quote=True ) )
+ return unicodify( '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, escape( str( self.value ), quote=True ) ) )
class SelectField(BaseField):
"""
@@ -308,7 +308,7 @@
rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" id="%s"%s%s><label class="inline" for="%s">%s</label></div>' % \
( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), uniq_id, escape( str( text ), quote=True ) ) )
ctr += 1
- return "\n".join( rval )
+ return unicodify( "\n".join( rval ) )
def get_html_radio( self, prefix="", disabled=False ):
rval = []
ctr = 0
@@ -333,7 +333,7 @@
uniq_id,
text ) )
ctr += 1
- return "\n".join( rval )
+ return unicodify( "\n".join( rval ) )
def get_html_default( self, prefix="", disabled=False ):
if self.multiple:
multiple = " multiple"
@@ -357,7 +357,7 @@
rval.insert( 0, '<select name="%s%s"%s%s%s%s%s>' % \
( prefix, self.name, multiple, size, self.refresh_on_change_text, last_selected_value, self.get_disabled_str( disabled ) ) )
rval.append( '</select>' )
- return "\n".join( rval )
+ return unicodify( "\n".join( rval ) )
def get_selected( self, return_label=False, return_value=False, multi=False ):
'''
Return the currently selected option's label, value or both as a tuple. For
@@ -513,7 +513,7 @@
find_expanded_options( expanded_options, self.options )
recurse_options( rval, self.options, drilldown_id, expanded_options )
rval.append( '</div>' )
- return '\n'.join( rval )
+ return unicodify( '\n'.join( rval ) )
class AddressField(BaseField):
@staticmethod
@@ -688,8 +688,8 @@
else:
ldda_ids = "||".join( [ self.trans.security.encode_id( ldda.id ) for ldda in self.lddas ] )
text = "<br />".join( [ "%s. %s" % (i+1, ldda.name) for i, ldda in enumerate(self.lddas)] )
- return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
- <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda_ids), quote=True ) )
+ return unicodify( '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
+ <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda_ids), quote=True ) ) )
def get_display_text(self):
if self.ldda:
https://bitbucket.org/galaxy/galaxy-central/changeset/61a2c343081f/
changeset: 61a2c343081f
user: dan
date: 2012-08-29 19:50:32
summary: Unicode fixes for running and extracting workflows.
affected #: 2 files
diff -r af13620fe9353bf8cfc243e0a2981a949b284523 -r 61a2c343081f2dc194caaacb3c379392353caa7a templates/workflow/build_from_current_history.mako
--- a/templates/workflow/build_from_current_history.mako
+++ b/templates/workflow/build_from_current_history.mako
@@ -78,7 +78,7 @@
<form method="post" action="${h.url_for()}"><div class='form-row'><label>${_('Workflow name')}</label>
- <input name="workflow_name" type="text" value="Workflow constructed from history '${history.name}'" size="60"/>
+ <input name="workflow_name" type="text" value="Workflow constructed from history '${ util.unicodify( history.name )}'" size="60"/></div><p><input type="submit" value="${_('Create Workflow')}" />
diff -r af13620fe9353bf8cfc243e0a2981a949b284523 -r 61a2c343081f2dc194caaacb3c379392353caa7a templates/workflow/run_complete.mako
--- a/templates/workflow/run_complete.mako
+++ b/templates/workflow/run_complete.mako
@@ -1,7 +1,7 @@
<%inherit file="/base.mako"/><div class="donemessagelarge">
- Successfully ran workflow "${workflow.name}". The following datasets have been added to the queue:
+ Successfully ran workflow "${util.unicodify( workflow.name )}". The following datasets have been added to the queue:
%for invocation in invocations:
<div class="workflow-invocation-complete">
%if invocation['new_history']:
@@ -14,7 +14,7 @@
%for step_outputs in invocation['outputs'].itervalues():
%for data in step_outputs.itervalues():
%if not invocation['new_history'] or data.history == invocation['new_history']:
- <p><strong>${data.hid}</strong>: ${data.name}</p>
+ <p><strong>${data.hid}</strong>: ${util.unicodify( data.name )}</p>
%endif
%endfor
%endfor
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: When creating the temp directory for library zip downloads, use the Galaxy user's umask and group on the directory.
by Bitbucket 29 Aug '12
by Bitbucket 29 Aug '12
29 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/068a043f6bb1/
changeset: 068a043f6bb1
user: natefoo
date: 2012-08-29 18:36:21
summary: When creating the temp directory for library zip downloads, use the Galaxy user's umask and group on the directory.
affected #: 1 file
diff -r 36b70e29192f0ddb92f36ddd6c9b3fcbf996783d -r 068a043f6bb17b4faa3e171ca244029fdd66bbfe lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -1752,6 +1752,7 @@
if action == 'zip':
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
+ util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
tmpf = os.path.join( tmpd, 'library_download.' + action )
if ziptype == '64' and trans.app.config.upstream_gzip:
archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5722c0ff5d7a/
changeset: 5722c0ff5d7a
user: Clare Sloggett
date: 2012-08-28 09:54:15
summary: In workflows api, when show workflow request is made, now displaying info on steps and connectors.
affected #: 1 file
diff -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb -r 5722c0ff5d7a1fcf2b568e4f1710322ec2b9df08 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -73,6 +73,16 @@
# Eventually, allow regular tool parameters to be inserted and modified at runtime.
# p = step.get_required_parameters()
item['inputs'] = inputs
+ steps = {}
+ for step in latest_workflow.steps:
+ steps[step.id] = {'id': step.id,
+ 'type': step.type,
+ 'tool_id': step.tool_id,
+ 'input_steps': {}}
+ for conn in step.input_connections:
+ steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
+ 'step_output': conn.output_name}
+ item['steps'] = steps
return item
@web.expose_api
https://bitbucket.org/galaxy/galaxy-central/changeset/36b70e29192f/
changeset: 36b70e29192f
user: dannon
date: 2012-08-29 17:41:13
summary: Merge.
affected #: 1 file
diff -r cc1c82662d88e0953d8df588d6449020eb25d0bf -r 36b70e29192f0ddb92f36ddd6c9b3fcbf996783d lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -75,6 +75,16 @@
# Eventually, allow regular tool parameters to be inserted and modified at runtime.
# p = step.get_required_parameters()
item['inputs'] = inputs
+ steps = {}
+ for step in latest_workflow.steps:
+ steps[step.id] = {'id': step.id,
+ 'type': step.type,
+ 'tool_id': step.tool_id,
+ 'input_steps': {}}
+ for conn in step.input_connections:
+ steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
+ 'step_output': conn.output_name}
+ item['steps'] = steps
return item
@web.expose_api
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a76cfda22bcd/
changeset: a76cfda22bcd
user: dan
date: 2012-08-29 16:49:37
summary: Unicode fixes for template selection.
affected #: 2 files
diff -r 36591da93d40a924a6276aefa49edcf8a675437c -r a76cfda22bcd50350fb4ebba00ce37b85a89bba3 templates/common/select_template.mako
--- a/templates/common/select_template.mako
+++ b/templates/common/select_template.mako
@@ -25,7 +25,7 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">Select a template for the ${item_desc} '${item_name}'</div>
+ <div class="toolFormTitle">Select a template for the ${item_desc} '${util.unicodify( item_name )}'</div><div class="toolFormBody">
%if form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE:
<form id="select_template" name="select_template" action="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type=item_type, form_type=trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, use_panels=use_panels, show_deleted=show_deleted )}" method="post" >
diff -r 36591da93d40a924a6276aefa49edcf8a675437c -r a76cfda22bcd50350fb4ebba00ce37b85a89bba3 templates/common/template_common.mako
--- a/templates/common/template_common.mako
+++ b/templates/common/template_common.mako
@@ -63,7 +63,7 @@
if history_user:
for history in history_user.histories:
if not history.deleted and str( widget.value ) == str( history.id ):
- value = history.name
+ value = util.unicodify( history.name )
break
else:
# If we didn't find the selected workflow option above, we'll just print the value
@@ -188,7 +188,7 @@
%elif widget_fields_have_contents:
<p/><div class="toolForm">
- <div class="toolFormTitle">Other information about ${item.name}</div>
+ <div class="toolFormTitle">Other information about ${ util.unicodify( item.name )}</div><div class="toolFormBody">
%for i, field in enumerate( widgets ):
${render_template_field( field )}
https://bitbucket.org/galaxy/galaxy-central/changeset/0e0f1d52e011/
changeset: 0e0f1d52e011
user: dan
date: 2012-08-29 16:49:37
summary: Unicode fixes for browsing libraries.
affected #: 2 files
diff -r a76cfda22bcd50350fb4ebba00ce37b85a89bba3 -r 0e0f1d52e011207f4c1a9b1b67e8359cd30b2acb templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako
+++ b/templates/library/common/browse_library.mako
@@ -247,14 +247,14 @@
%endif
/>
%if simple:
- <label for="${trans.security.encode_id( ldda.id )}">${ldda.name}</label>
+ <label for="${trans.security.encode_id( ldda.id )}">${ util.unicodify( ldda.name )}</label>
%else:
<div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${ldda.id}-popup"><a class="view-info" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">
%if ldda.library_dataset.deleted:
- <div class="libraryItem-error">${ldda.name}</div>
+ <div class="libraryItem-error">${util.unicodify( ldda.name )}</div>
%else:
- ${ldda.name}
+ ${util.unicodify( ldda.name )}
%endif
</a></div>
@@ -288,7 +288,7 @@
%endif
%if can_modify:
%if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
- <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
+ <a class="action-button" confirm="Click OK to delete dataset '${util.unicodify( ldda.name )}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
%elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
<a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
%endif
diff -r a76cfda22bcd50350fb4ebba00ce37b85a89bba3 -r 0e0f1d52e011207f4c1a9b1b67e8359cd30b2acb templates/library/common/common.mako
--- a/templates/library/common/common.mako
+++ b/templates/library/common/common.mako
@@ -100,7 +100,7 @@
%if replace_dataset not in [ None, 'None' ]:
<input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id )}"/><div class="form-row">
- You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${replace_dataset.name}</a>'.
+ You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${util.unicodify( replace_dataset.name )}</a>'.
<div style="clear: both"></div></div>
%endif
@@ -340,7 +340,7 @@
</script>
%elif upload_option == 'import_from_history':
<div class="toolForm">
- <div class="toolFormTitle">Active datasets in your current history (${history.name})</div>
+ <div class="toolFormTitle">Active datasets in your current history (${ util.unicodify( history.name )})</div><div class="toolFormBody">
%if history and history.active_datasets:
<form name="add_history_datasets_to_library" action="${h.url_for( controller='library_common', action='add_history_datasets_to_library', cntrller=cntrller, library_id=library_id )}" enctype="multipart/form-data" method="post">
@@ -359,7 +359,7 @@
%if replace_dataset not in [ None, 'None' ]:
<input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id )}"/><div class="form-row">
- You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${replace_dataset.name}</a>'.
+ You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${ util.unicodify( replace_dataset.name )}</a>'.
<div style="clear: both"></div></div>
%endif
@@ -367,7 +367,7 @@
<% encoded_id = trans.security.encode_id( hda.id ) %><div class="form-row"><input name="hda_ids" id="hist_${encoded_id}" value="${encoded_id}" type="checkbox"/>
- <label for="hist_${encoded_id}" style="display: inline;font-weight:normal;">${hda.hid}: ${hda.name}</label>
+ <label for="hist_${encoded_id}" style="display: inline;font-weight:normal;">${hda.hid}: ${ util.unicodify( hda.name )}</label></div>
%endfor
%if widgets:
https://bitbucket.org/galaxy/galaxy-central/changeset/35cbec7cebd2/
changeset: 35cbec7cebd2
user: dan
date: 2012-08-29 16:49:37
summary: Unicode fixes for importing library datasets.
affected #: 1 file
diff -r 0e0f1d52e011207f4c1a9b1b67e8359cd30b2acb -r 35cbec7cebd2a4a24164b124a33a91202bbe3915 templates/library/common/import_datasets_to_histories.mako
--- a/templates/library/common/import_datasets_to_histories.mako
+++ b/templates/library/common/import_datasets_to_histories.mako
@@ -18,7 +18,7 @@
</%def>
%if message:
- ${render_msg( message, status )}
+ ${render_msg( util.unicodify( message ), status )}
%endif
<b>Import library datasets into histories</b>
@@ -36,7 +36,7 @@
%><div class="form-row"><input type="checkbox" name="ldda_ids" id="dataset_${encoded_id}" value="${encoded_id}" ${checked}/>
- <label for="dataset_${encoded_id}" style="display: inline;font-weight:normal;">${source_ldda.name}</label>
+ <label for="dataset_${encoded_id}" style="display: inline;font-weight:normal;">${util.unicodify( source_ldda.name )}</label></div>
%endfor
%else:
@@ -62,7 +62,7 @@
else:
current_history_text = ""
%>
- <option value="${encoded_id}"${selected_text}>${i + 1}: ${h.truncate( target_history.name, 30 )}${current_history_text}</option>
+ <option value="${encoded_id}"${selected_text}>${i + 1}: ${h.truncate( util.unicodify( target_history.name ), 30 )}${current_history_text}</option>
%endfor
</select><br/><br/>
@@ -79,7 +79,7 @@
%><div class="form-row"><input type="checkbox" name="target_history_ids" id="target_history_${encoded_id}" value="${encoded_id}"/>
- <label for="target_history_${encoded_id}" style="display: inline; font-weight:normal;">${i + 1}: ${target_history.name}${current_history_text}</label>
+ <label for="target_history_${encoded_id}" style="display: inline; font-weight:normal;">${i + 1}: ${util.unicodify( target_history.name )}${current_history_text}</label></div>
%endfor
</div>
https://bitbucket.org/galaxy/galaxy-central/changeset/6e16daab2c88/
changeset: 6e16daab2c88
user: dan
date: 2012-08-29 16:49:37
summary: Unicode fixes for editing library datasets.
affected #: 1 file
diff -r 35cbec7cebd2a4a24164b124a33a91202bbe3915 -r 6e16daab2c882515633a41d1a48ce1ebbfed5b6b templates/library/common/ldda_edit_info.mako
--- a/templates/library/common/ldda_edit_info.mako
+++ b/templates/library/common/ldda_edit_info.mako
@@ -44,18 +44,18 @@
%if ( trans.user_is_admin() and cntrller=='library_admin' ) or trans.app.security_agent.can_modify_library_item( current_user_roles, ldda.library_dataset ):
<div class="toolForm">
- <div class="toolFormTitle">Edit attributes of ${ldda.name}</div>
+ <div class="toolFormTitle">Edit attributes of ${util.unicodify( ldda.name )}</div><div class="toolFormBody"><form name="edit_attributes" action="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), use_panels=use_panels, show_deleted=show_deleted, )}" method="post"><input type="hidden" name="id" value="${trans.security.encode_id( ldda.id )}"/><div class="form-row"><label>Name:</label>
- <input type="text" name="name" value="${ldda.name}" size="40"/>
+ <input type="text" name="name" value="${util.unicodify( ldda.name )}" size="40"/><div style="clear: both"></div></div><div class="form-row"><label>Info:</label>
- <input type="text" name="info" value="${ldda.info}" size="40"/>
+ <input type="text" name="info" value="${util.unicodify( ldda.info )}" size="40"/><div style="clear: both"></div></div><div class="form-row">
@@ -125,16 +125,16 @@
<p/>
%else:
<div class="toolForm">
- <div class="toolFormTitle">View information about ${ldda.name}</div>
+ <div class="toolFormTitle">View information about ${util.unicodify( ldda.name )}</div><div class="toolFormBody"><div class="form-row"><label>Name:</label>
- ${ldda.name}
+ ${util.unicodify( ldda.name )}
<div style="clear: both"></div></div><div class="form-row"><label>Info:</label>
- ${ldda.info}
+ ${util.unicodify( ldda.info )}
<div style="clear: both"></div></div><div class="form-row">
https://bitbucket.org/galaxy/galaxy-central/changeset/2b713397767f/
changeset: 2b713397767f
user: dan
date: 2012-08-29 16:49:38
summary: Unicode fixes for viewing library datasets.
affected #: 2 files
diff -r 6e16daab2c882515633a41d1a48ce1ebbfed5b6b -r 2b713397767fd6552a755234ad4d8cf8aae7b439 templates/library/common/ldda_info.mako
--- a/templates/library/common/ldda_info.mako
+++ b/templates/library/common/ldda_info.mako
@@ -47,7 +47,7 @@
<div class="toolForm"><div class="toolFormTitle">
- Information about <div class="menubutton popup" id="dataset-${ldda.id}-popup">${ldda.name}</div>
+ Information about <div class="menubutton popup" id="dataset-${ldda.id}-popup">${util.unicodify( ldda.name )}</div>
%if not library.deleted and not branch_deleted( ldda.library_dataset.folder ) and not ldda.library_dataset.deleted:
<div popupmenu="dataset-${ldda.id}-popup">
%if can_modify:
@@ -119,7 +119,7 @@
</div><div class="form-row"><label>Miscellaneous information:</label>
- ${ldda.info}
+ ${util.unicodify( ldda.info )}
<div style="clear: both"></div></div>
%if ldda.creating_job_associations:
@@ -163,7 +163,7 @@
<div class="form-row"><div id="info${ldda.id}" class="historyItemBody"><label>Peek:</label>
- <div><pre id="peek${ldda.id}" class="peek">${ldda.display_peek()}</pre></div>
+ <div><pre id="peek${ldda.id}" class="peek">${util.unicodify( ldda.display_peek() )}</pre></div></div></div>
%endif
@@ -278,10 +278,10 @@
<% expired_lddas = [ e_ldda for e_ldda in ldda.library_dataset.expired_datasets ] %>
%if expired_lddas:
<br/>
- <div class="toolFormTitle">Expired versions of ${ldda.name}</div>
+ <div class="toolFormTitle">Expired versions of ${util.unicodify( ldda.name )}</div>
%for expired_ldda in expired_lddas:
<div class="form-row">
- <a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( expired_ldda.library_dataset.folder.id ), id=trans.security.encode_id( expired_ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">${expired_ldda.name}</a>
+ <a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( expired_ldda.library_dataset.folder.id ), id=trans.security.encode_id( expired_ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">${util.unicodify( expired_ldda.name )}</a></div>
%endfor
%endif
diff -r 6e16daab2c882515633a41d1a48ce1ebbfed5b6b -r 2b713397767fd6552a755234ad4d8cf8aae7b439 templates/library/common/library_dataset_info.mako
--- a/templates/library/common/library_dataset_info.mako
+++ b/templates/library/common/library_dataset_info.mako
@@ -25,20 +25,20 @@
%if ( trans.user_is_admin() and cntrller=='library_admin' ) or trans.app.security_agent.can_modify_library_item( current_user_roles, library_dataset ):
<div class="toolForm">
- <div class="toolFormTitle">Edit attributes of ${library_dataset.name}</div>
+ <div class="toolFormTitle">Edit attributes of ${util.unicodify( library_dataset.name )}</div><div class="toolFormBody"><form name="edit_attributes" action="${h.url_for( controller='library_common', action='library_dataset_info', id=trans.security.encode_id( library_dataset.id ), library_id=library_id, show_deleted=show_deleted )}" method="post"><div class="form-row"><label>Name:</label><div style="float: left; width: 250px; margin-right: 10px;">
- <input type="text" name="name" value="${library_dataset.name}" size="40"/>
+ <input type="text" name="name" value="${util.unicodify( library_dataset.name )}" size="40"/></div><div style="clear: both"></div></div><div class="form-row"><label>Info:</label><div style="float: left; width: 250px; margin-right: 10px;">
- <input type="text" name="info" value="${library_dataset.info}" size="40"/>
+ <input type="text" name="info" value="${util.unicodify( library_dataset.info )}" size="40"/></div><div style="clear: both"></div></div>
@@ -50,12 +50,12 @@
</div>
%else:
<div class="toolForm">
- <div class="toolFormTitle">View information about ${library_dataset.name}</div>
+ <div class="toolFormTitle">View information about ${util.unicodify( library_dataset.name )}</div><div class="toolFormBody"><div class="form-row">
- <b>Name:</b> ${library_dataset.name}
+ <b>Name:</b> ${util.unicodify( library_dataset.name )}
<div style="clear: both"></div>
- <b>Info:</b> ${library_dataset.info}
+ <b>Info:</b> ${util.unicodify( library_dataset.info )}
<div style="clear: both"></div><b>Dataset Versions:</b><div style="clear: both"></div>
https://bitbucket.org/galaxy/galaxy-central/changeset/e2902ab09446/
changeset: e2902ab09446
user: dan
date: 2012-08-29 16:49:38
summary: Unicode fixes for editing library permissions.
affected #: 1 file
diff -r 2b713397767fd6552a755234ad4d8cf8aae7b439 -r e2902ab094466c3591bacde61233d9da186c74a3 templates/library/common/ldda_permissions.mako
--- a/templates/library/common/ldda_permissions.mako
+++ b/templates/library/common/ldda_permissions.mako
@@ -7,7 +7,7 @@
name_str = '%d selected datasets' % len( lddas )
else:
ldda = lddas[0]
- name_str = ldda.name
+ name_str = util.unicodify( ldda.name )
%><br/><br/>
https://bitbucket.org/galaxy/galaxy-central/changeset/cc1c82662d88/
changeset: cc1c82662d88
user: dan
date: 2012-08-29 16:49:38
summary: Unicode fixes for moving library datasets.
affected #: 1 file
diff -r e2902ab094466c3591bacde61233d9da186c74a3 -r cc1c82662d88e0953d8df588d6449020eb25d0bf templates/library/common/move_library_item.mako
--- a/templates/library/common/move_library_item.mako
+++ b/templates/library/common/move_library_item.mako
@@ -45,7 +45,7 @@
%><div class="form-row"><input type="checkbox" name="item_id" id="dataset_${encoded_id}" value="${encoded_id}" ${checked}/>
- <label for="dataset_${encoded_id}" style="display: inline;font-weight:normal;">${move_ldda.name}</label>
+ <label for="dataset_${encoded_id}" style="display: inline;font-weight:normal;">${util.unicodify( move_ldda.name )}</label></div>
%endfor
%elif item_type == 'folder':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
29 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/36591da93d40/
changeset: 36591da93d40
user: dannon
date: 2012-08-29 16:20:49
summary: Missed .hgignore change, reverted.
affected #: 1 file
diff -r 07045f4895170a154c955f48291f2b5e6feb94c2 -r 36591da93d40a924a6276aefa49edcf8a675437c .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -78,7 +78,3 @@
*.rej
*~
-syntax: regexp
-^database$
-syntax: regexp
-^scripts/api/spp_submodule\.ga$
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
10 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/849e1713f613/
changeset: 849e1713f613
user: rpark37
date: 2012-02-01 22:21:38
summary: Updated scripts for API workflow enhancements and changing workflow parameters programmatically
affected #: 6 files
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -78,3 +78,7 @@
*.rej
*~
=20
+syntax: regexp
+^database$
+syntax: regexp
+^scripts/api/spp_submodule\.ga$
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -84,6 +84,20 @@
=20
However, we will import them if installed_repository_file is speci=
fied
"""
+
+ # ----------------------------------------------------------------=
--------------- # =20
+ ### RPARK: dictionary containing which workflows to change and edi=
t ###
+ param_map =3D {};
+ if (payload.has_key('parameters') ):
+ #if (payload['parameters']):
+ param_map =3D payload['parameters'];
+ print("PARAMETER MAP:");
+ print(param_map);
+ # ----------------------------------------------------------------=
--------------- # =20
+ =20
+
+ =20
+ =20
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
@@ -168,6 +182,30 @@
# are not persisted so we need to do it every time)
step.module.add_dummy_datasets( connections=3Dstep.input_c=
onnections )
step.state =3D step.module.state
+ =20
+ ####################################################
+ ####################################################
+ #print("CHECKING WORKFLOW STEPS:")
+ #print(step.tool_id);
+ #print(step.state.inputs);
+ #print("upgard messages");
+ #print(step.state);
+ #print("\n");
+ # RPARK: IF TOOL_NAME IN PARAMETER MAP #
+ if step.tool_id in param_map:
+ #print("-------------------------FOUND IN PARAMETER DI=
CTIONARY")
+ #print(param_map[step.tool_id]);
+ change_param =3D param_map[step.tool_id]['param'];
+ change_value =3D param_map[step.tool_id]['value'];
+ #step.state.inputs['refGenomeSource']['index'] =3D "cr=
apolo";
+ #print(step.state.inputs[change_param]);
+ step.state.inputs[change_param] =3D change_value;
+ #print(step.state.inputs[change_param]);
+ #print(param_map[step.tool_id][change_value]);
+ #print("----------------------------------------------=
----")
+ ####################################################
+ ####################################################
+ =20
if step.tool_errors:
trans.response.status =3D 400
return "Workflow cannot be run because of validation e=
rrors in some steps: %s" % step_errors
@@ -220,3 +258,343 @@
trans.sa_session.flush()
return rval
=20
+ # --------------------------------------------------------------------=
-------------------------- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ # ---- RPARK EDITS ---- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ @web.expose_api
+ @web.json
+ def workflow_dict( self, trans, workflow_id, **kwd ):
+ """
+ GET /api/workflows/{encoded_workflow_id}/download
+ Returns a selected workflow as a json dictionary.=20
+ """
+ print "workflow controller: workflow dict called"
+ print workflow_id
+ =20
+ try:
+ stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
+ except Exception,e:
+ return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
+ =20
+ # check to see if user has permissions to selected workflow=20
+ if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
+ if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
+ trans.response.status =3D 400
+ return("Workflow is not owned by or shared with current us=
er")
+ =20
+ return self._workflow_to_dict( trans, stored_workflow )
+ =20
+ @web.expose_api
+ def delete( self, trans, id, **kwd ): =20
+ """
+ DELETE /api/workflows/{encoded_workflow_id}
+ Deletes a specified workflow
+ Author: rpark
+ =20
+ copied from galaxy.web.controllers.workflows.py (delete)
+ """
+ workflow_id =3D id;
+ =20
+ try:
+ stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
+ except Exception,e:
+ return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
+ =20
+ # check to see if user has permissions to selected workflow=20
+ if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
+ if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
+ trans.response.status =3D 400
+ return("Workflow is not owned by or shared with current us=
er")
+
+ #Mark a workflow as deleted
+ stored_workflow.deleted =3D True
+ trans.sa_session.flush()
+ =20
+ # Python Debugger
+ #import pdb; pdb.set_trace()
+ =20
+ # TODO: Unsure of response message to let api know that a workflow=
was successfully deleted
+ #return 'OK'
+ return ( "Workflow '%s' successfully deleted" % stored_workflow.na=
me )
+ =20
+ @web.expose_api
+ def import_new_workflow(self, trans, payload, **kwd):
+ """
+ POST /api/workflows
+ Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
+ Author: rpark=20
+ =20
+ # currently assumes payload['workflow'] is a json representation o=
f a workflow to be inserted into the database
+ """
+ =20
+ #import pdb; pdb.set_trace()
+ =20
+ data =3D payload['workflow'];
+ workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
+ =20
+ # galaxy workflow newly created id =20
+ workflow_id =3D workflow.id;
+ # api encoded, id=20
+ encoded_id =3D trans.security.encode_id(workflow_id);
+ =20
+ =20
+ =20
+ # return list
+ rval=3D [];
+ =20
+ item =3D workflow.get_api_value(value_mapper=3D{'id':trans.securit=
y.encode_id})
+ item['url'] =3D url_for('workflow', id=3Dencoded_id)
+ =20
+ rval.append(item); =20
+ =20
+ return rval;
+ =20
+
+ def _workflow_from_dict( self, trans, data, source=3DNone ):
+ """
+ RPARK: copied from galaxy.web.controllers.workflows.py
+ Creates a workflow from a dict. Created workflow is stored in the =
database and returned.
+ """
+ # Put parameters in workflow mode
+ trans.workflow_building_mode =3D True
+ # Create new workflow from incoming dict
+ workflow =3D model.Workflow()
+ # If there's a source, put it in the workflow name.
+ if source:
+ name =3D "%s (imported from %s)" % ( data['name'], source )
+ else:
+ name =3D data['name']
+ workflow.name =3D name
+ # Assume no errors until we find a step that has some
+ workflow.has_errors =3D False
+ # Create each step
+ steps =3D []
+ # The editor will provide ids for each step that we don't need to =
save,
+ # but do need to use to make connections
+ steps_by_external_id =3D {}
+ # Keep track of tools required by the workflow that are not availa=
ble in
+ # the local Galaxy instance. Each tuple in the list of missing_to=
ol_tups
+ # will be ( tool_id, tool_name, tool_version ).
+ missing_tool_tups =3D []
+ # First pass to build step objects and populate basic values
+ for key, step_dict in data[ 'steps' ].iteritems():
+ # Create the model class for the step
+ step =3D model.WorkflowStep()
+ steps.append( step )
+ steps_by_external_id[ step_dict['id' ] ] =3D step
+ # FIXME: Position should be handled inside module
+ step.position =3D step_dict['position']
+ module =3D module_factory.from_dict( trans, step_dict, secure=
=3DFalse )
+ if module.type =3D=3D 'tool' and module.tool is None:
+ # A required tool is not available in the local Galaxy ins=
tance.
+ missing_tool_tup =3D ( step_dict[ 'tool_id' ], step_dict[ =
'name' ], step_dict[ 'tool_version' ] )
+ if missing_tool_tup not in missing_tool_tups:
+ missing_tool_tups.append( missing_tool_tup )
+ module.save_to_step( step )
+ if step.tool_errors:
+ workflow.has_errors =3D True
+ # Stick this in the step temporarily
+ step.temp_input_connections =3D step_dict['input_connections']
+ =20
+ # Save step annotation.
+ annotation =3D step_dict[ 'annotation' ]
+ if annotation:
+ annotation =3D sanitize_html( annotation, 'utf-8', 'text/h=
tml' )
+ # ------------------------------------------ #
+ # RPARK REMOVING: user annotation b/c of API
+ #self.add_item_annotation( trans.sa_session, trans.get_use=
r(), step, annotation )
+ # ------------------------------------------ #
+ =20
+ # Unpack and add post-job actions.
+ post_job_actions =3D step_dict.get( 'post_job_actions', {} )
+ for name, pja_dict in post_job_actions.items():
+ pja =3D PostJobAction( pja_dict[ 'action_type' ],=20
+ step, pja_dict[ 'output_name' ],=20
+ pja_dict[ 'action_arguments' ] )
+ # Second pass to deal with connections between steps
+ for step in steps:
+ # Input connections
+ for input_name, conn_dict in step.temp_input_connections.iteri=
tems():
+ if conn_dict:
+ conn =3D model.WorkflowStepConnection()
+ conn.input_step =3D step
+ conn.input_name =3D input_name
+ conn.output_name =3D conn_dict['output_name']
+ conn.output_step =3D steps_by_external_id[ conn_dict['=
id'] ]
+ del step.temp_input_connections
+ # Order the steps if possible
+ attach_ordered_steps( workflow, steps )
+ # Connect up
+ stored =3D model.StoredWorkflow()
+ stored.name =3D workflow.name
+ workflow.stored_workflow =3D stored
+ stored.latest_workflow =3D workflow
+ stored.user =3D trans.user
+ # Persist
+ trans.sa_session.add( stored )
+ trans.sa_session.flush()
+ return stored, missing_tool_tups
+ =20
+ def _workflow_to_dict( self, trans, stored ):
+ """
+ RPARK: copied from galaxy.web.controllers.workflows.py
+ Converts a workflow to a dict of attributes suitable for exporting.
+ """
+ workflow =3D stored.latest_workflow
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ workflow_annotation =3D self.get_item_annotation_obj( trans.sa_ses=
sion, trans.user, stored )
+ annotation_str =3D ""
+ if workflow_annotation:
+ annotation_str =3D workflow_annotation.annotation
+ ### ----------------------------------- ###
+ =20
+ =20
+ # Pack workflow data into a dictionary and return
+ data =3D {}
+ data['a_galaxy_workflow'] =3D 'true' # Placeholder for identifying=
galaxy workflow
+ data['format-version'] =3D "0.1"
+ data['name'] =3D workflow.name
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ data['annotation'] =3D annotation_str
+ ### ----------------------------------- ###
+ =20
+ data['steps'] =3D {}
+ # For each step, rebuild the form and encode the state
+ for step in workflow.steps:
+ # Load from database representation
+ module =3D module_factory.from_workflow_step( trans, step )
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ # Get user annotation.
+ step_annotation =3D self.get_item_annotation_obj(trans.sa_sess=
ion, trans.user, step )
+ annotation_str =3D ""
+ if step_annotation:
+ annotation_str =3D step_annotation.annotation =20
+ ### ----------------------------------- ###
+ =20
+ # Step info
+ step_dict =3D {
+ 'id': step.order_index,
+ 'type': module.type,
+ 'tool_id': module.get_tool_id(),
+ 'tool_version' : step.tool_version,
+ 'name': module.get_name(),
+ 'tool_state': module.get_state( secure=3DFalse ),
+ 'tool_errors': module.get_errors(),
+ ## 'data_inputs': module.get_data_inputs(),
+ ## 'data_outputs': module.get_data_outputs(),
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ 'annotation' : annotation_str
+ ### ----------------------------------- ###
+ =20
+ }
+ # Add post-job actions to step dict.
+ if module.type =3D=3D 'tool':
+ pja_dict =3D {}
+ for pja in step.post_job_actions:
+ pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,=20
+ outp=
ut_name =3D pja.output_name,
+ acti=
on_arguments =3D pja.action_arguments )
+ step_dict[ 'post_job_actions' ] =3D pja_dict
+ # Data inputs
+ step_dict['inputs'] =3D []
+ if module.type =3D=3D "data_input":
+ # Get input dataset name; default to 'Input Dataset'
+ name =3D module.state.get( 'name', 'Input Dataset')
+ step_dict['inputs'].append( { "name" : name, "description"=
: annotation_str } )
+ else:
+ # Step is a tool and may have runtime inputs.
+ for name, val in module.state.inputs.items():
+ input_type =3D type( val )
+ if input_type =3D=3D RuntimeValue:
+ step_dict['inputs'].append( { "name" : name, "desc=
ription" : "runtime parameter for tool %s" % module.get_name() } )
+ elif input_type =3D=3D dict:
+ # Input type is described by a dict, e.g. indexed =
parameters.
+ for partname, partval in val.items():
+ if type( partval ) =3D=3D RuntimeValue:
+ step_dict['inputs'].append( { "name" : nam=
e, "description" : "runtime parameter for tool %s" % module.get_name() } )
+ # User outputs
+ step_dict['user_outputs'] =3D []
+ """
+ module_outputs =3D module.get_data_outputs()
+ step_outputs =3D trans.sa_session.query( WorkflowOutput ).filt=
er( step=3Dstep )
+ for output in step_outputs:
+ name =3D output.output_name
+ annotation =3D ""
+ for module_output in module_outputs:
+ if module_output.get( 'name', None ) =3D=3D name:
+ output_type =3D module_output.get( 'extension', ''=
)
+ break
+ data['outputs'][name] =3D { 'name' : name, 'annotation' : =
annotation, 'type' : output_type }
+ """
+
+ # All step outputs
+ step_dict['outputs'] =3D []
+ if type( module ) is ToolModule:
+ for output in module.get_data_outputs():
+ step_dict['outputs'].append( { 'name' : output['name']=
, 'type' : output['extensions'][0] } )
+ # Connections
+ input_connections =3D step.input_connections
+ if step.type is None or step.type =3D=3D 'tool':
+ # Determine full (prefixed) names of valid input datasets
+ data_input_names =3D {}
+ def callback( input, value, prefixed_name, prefixed_label =
):
+ if isinstance( input, DataToolParameter ):
+ data_input_names[ prefixed_name ] =3D True
+ visit_input_values( module.tool.inputs, module.state.input=
s, callback )
+ # Filter
+ # FIXME: this removes connection without displaying a mess=
age currently!
+ input_connections =3D [ conn for conn in input_connections=
if conn.input_name in data_input_names ]
+ # Encode input connections as dictionary
+ input_conn_dict =3D {}
+ for conn in input_connections:
+ input_conn_dict[ conn.input_name ] =3D \
+ dict( id=3Dconn.output_step.order_index, output_name=
=3Dconn.output_name )
+ step_dict['input_connections'] =3D input_conn_dict
+ # Position
+ step_dict['position'] =3D step.position
+ # Add to return value
+ data['steps'][step.order_index] =3D step_dict
+ return data
+ =20
+ def get_item_annotation_obj( self, db_session, user, item ):
+ """=20
+ RPARK: copied from galaxy.model.item_attr.py
+ Returns a user's annotation object for an item. """
+ # Get annotation association class.
+ annotation_assoc_class =3D self._get_annotation_assoc_class( item )
+ if not annotation_assoc_class:
+ return None
+ =20
+ # Get annotation association object.
+ annotation_assoc =3D db_session.query( annotation_assoc_class ).fi=
lter_by( user=3Duser )
+ =20
+ # TODO: use filtering like that in _get_item_id_filter_str()
+ if item.__class__ =3D=3D galaxy.model.History:
+ annotation_assoc =3D annotation_assoc.filter_by( history=3Dite=
m )
+ elif item.__class__ =3D=3D galaxy.model.HistoryDatasetAssociation:
+ annotation_assoc =3D annotation_assoc.filter_by( hda=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.StoredWorkflow:
+ annotation_assoc =3D annotation_assoc.filter_by( stored_workfl=
ow=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.WorkflowStep:
+ annotation_assoc =3D annotation_assoc.filter_by( workflow_step=
=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.Page:
+ annotation_assoc =3D annotation_assoc.filter_by( page=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.Visualization:
+ annotation_assoc =3D annotation_assoc.filter_by( visualization=
=3Ditem )
+ return annotation_assoc.first()
+ =20
+ def _get_annotation_assoc_class( self, item ):
+ """=20
+ RPARK: copied from galaxy.model.item_attr.py
+ Returns an item's item-annotation association class. """
+ class_name =3D '%sAnnotationAssociation' % item.__class__.__name__
+ return getattr( galaxy.model, class_name, None )
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -151,6 +151,20 @@
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_=
prefix=3D'/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_i=
d}/library/{library_id}', controller=3D'workflows', action=3D'run', workflo=
w_id=3DNone, library_id=3DNone, conditions=3Ddict(method=3D["GET"]) )
=20
+ # ---------------------------------------------- #
+ # ---------------------------------------------- #
+ # RPARK EDIT=20
+ =20
+ # How to extend API: url_mapping=20
+ # "POST /api/workflows/import" =3D> ``workflows.import_workflow(=
)``.
+ # Defines a named route "import_workflow".
+ webapp.api_mapper.connect("import_workflow", "/api/workflows/uploa=
d", controller=3D"workflows", action=3D"import_new_workflow", conditions=3D=
dict(method=3D["POST"]))
+ webapp.api_mapper.connect("workflow_dict", '/api/workflows/downloa=
d/{workflow_id}', controller=3D'workflows', action=3D'workflow_dict', condi=
tions=3Ddict(method=3D['GET']))
+ =20
+ #import pdb; pdb.set_trace() =20
+ # ---------------------------------------------- #
+ # ---------------------------------------------- #
+ =20
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_delete_workflow_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_delete_workflow_rpark.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+"""
+# Author: RPARK
+API script for deleting workflows=20
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import delete
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
+ sys.exit( 1 )
+try:
+ data =3D {}
+ data[ 'purge' ] =3D sys.argv[3]
+except IndexError:
+ pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_execute_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_execute_rpark.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Execute workflows from the command line.
+Example calls:
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
+"""
+
+"""
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
+
+'param=3Dtool=3Dname=3Dvalue'
+
+Example=20
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbwa'=20
+
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
+
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakca=
lling_spp=3Dwindow_size=3D1000'=20
+
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+
+def main():
+ try:
+ print("workflow_execute:py:");
+ data =3D {}
+ data['workflow_id'] =3D sys.argv[3]
+ data['history'] =3D sys.argv[4]
+ data['ds_map'] =3D {}
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ data['parameters'] =3D {};
+
+ # DBTODO If only one input is given, don't require a step
+ # mapping, just use it for everything?
+ for v in sys.argv[5:]:
+ print("Multiple arguments ");
+ print(v);
+
+ try:
+ step, src, ds_id =3D v.split('=3D');
+ data['ds_map'][step] =3D {'src':src, 'id':ds_id};
+
+ except ValueError:
+ print("VALUE ERROR:");
+ wtype, wtool, wparam, wvalue =3D v.split('=3D');
+ data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
+
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
+ #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+
+ except IndexError:
+ print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
+ sys.exit(1)
+ submit( sys.argv[1], sys.argv[2], data )
+
+if __name__ =3D=3D '__main__':
+ main()
+
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_import_from_file_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_import_from_file_rpark.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+
+python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f=
http://localhost:8080/api/workflows/import 'spp_submodule.ga'
+python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f=
http://localhost:8080/api/workflows/import 'spp_submodule.ga'
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+### Rpark edit ###
+import simplejson
+
+def openWorkflow(in_file): =20
+ with open(in_file) as f:
+ temp_data =3D simplejson.load(f)
+ return temp_data;
+
+
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [name] ' % os.path.basename( sys.argv[0] )
+ sys.exit( 1 )
+try:
+ #data =3D {}
+ #data[ 'name' ] =3D sys.argv[3]
+ data =3D {};
+ workflow_dict =3D openWorkflow(sys.argv[3]);
+ data ['workflow'] =3D workflow_dict;
+ =20
+ =20
+except IndexError:
+ pass
+
+submit( sys.argv[1], sys.argv[2], data )
https://bitbucket.org/galaxy/galaxy-central/changeset/67e4caf2a34f/
changeset: 67e4caf2a34f
user: rpark37
date: 2012-02-01 22:28:00
summary: Updated import statements in workflows api controller
affected #: 1 file
diff -r 849e1713f613a1932595b82fcd0e65a19bf5e366 -r 67e4caf2a34f4dc6ae37697=
04696b18db98a3540 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -11,6 +11,21 @@
from galaxy.workflow.modules import module_factory
from galaxy.jobs.actions.post import ActionBox
=20
+# ------------------------------------------------------------------------=
---------------------- #
+# ------------------------------------------------------------------------=
---------------------- #
+# ---- RPARK EDITS ---- #
+import pkg_resources
+pkg_resources.require( "simplejson" )
+from galaxy import model
+from galaxy.web.controllers.workflow import attach_ordered_steps
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.workflow.modules import *
+from galaxy.model.item_attrs import *
+
+# ------------------------------------------------------------------------=
---------------------- #
+# ------------------------------------------------------------------------=
---------------------- #=20
+
+
log =3D logging.getLogger(__name__)
=20
class WorkflowsAPIController(BaseAPIController):
https://bitbucket.org/galaxy/galaxy-central/changeset/7738795047c6/
changeset: 7738795047c6
user: rpark37
date: 2012-02-01 23:22:50
summary: updated workflow_dict function for returning a selected workflow as a json object via API
affected #: 1 file
diff -r 67e4caf2a34f4dc6ae3769704696b18db98a3540 -r 7738795047c6a2af15d6b8a=
58173212b95c9f528 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -285,8 +285,6 @@
GET /api/workflows/{encoded_workflow_id}/download
Returns a selected workflow as a json dictionary.=20
"""
- print "workflow controller: workflow dict called"
- print workflow_id
=20
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
https://bitbucket.org/galaxy/galaxy-central/changeset/a81a7e9ef257/
changeset: a81a7e9ef257
user: rpark37
date: 2012-02-08 23:23:58
summary: Updated import new workflow function
affected #: 1 file
diff -r 7738795047c6a2af15d6b8a58173212b95c9f528 -r a81a7e9ef257d26ee0fdcdc=
d124f6205a4692653 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -279,7 +279,7 @@
# --------------------------------------------------------------------=
-------------------------- #
# --------------------------------------------------------------------=
-------------------------- #
@web.expose_api
- @web.json
+ #(a)web.json
def workflow_dict( self, trans, workflow_id, **kwd ):
"""
GET /api/workflows/{encoded_workflow_id}/download
@@ -297,8 +297,9 @@
trans.response.status =3D 400
return("Workflow is not owned by or shared with current us=
er")
=20
- return self._workflow_to_dict( trans, stored_workflow )
- =20
+ ret_dict =3D self._workflow_to_dict( trans, stored_workflow );
+ return ret_dict
+ =20
@web.expose_api
def delete( self, trans, id, **kwd ): =20
"""
@@ -352,8 +353,6 @@
# api encoded, id=20
encoded_id =3D trans.security.encode_id(workflow_id);
=20
- =20
- =20
# return list
rval=3D [];
=20
@@ -362,8 +361,7 @@
=20
rval.append(item); =20
=20
- return rval;
- =20
+ return item;
=20
def _workflow_from_dict( self, trans, data, source=3DNone ):
"""
https://bitbucket.org/galaxy/galaxy-central/changeset/edcfb659dc4f/
changeset: edcfb659dc4f
user: rpark37
date: 2012-03-16 22:23:15
summary: Updated workflow API
affected #: 1 file
diff -r a81a7e9ef257d26ee0fdcdcd124f6205a4692653 -r edcfb659dc4fc03e4385d63=
e36e5df380a20c898 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -336,7 +336,7 @@
@web.expose_api
def import_new_workflow(self, trans, payload, **kwd):
"""
- POST /api/workflows
+ POST /api/workflows/upload
Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
Author: rpark=20
=20
@@ -344,7 +344,7 @@
"""
=20
#import pdb; pdb.set_trace()
- =20
+ =20
data =3D payload['workflow'];
workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
=20
https://bitbucket.org/galaxy/galaxy-central/changeset/0f3e6e68d8f5/
changeset: 0f3e6e68d8f5
user: rpark37
date: 2012-04-11 05:00:41
summary: Updated notes on how to run api/workflow_execute_parameters.py
affected #: 2 files
diff -r edcfb659dc4fc03e4385d63e36e5df380a20c898 -r 0f3e6e68d8f53ffdaee2f37=
1cc3d21a0d6a3b3aa scripts/api/workflow_execute_parameters.py
--- /dev/null
+++ b/scripts/api/workflow_execute_parameters.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Execute workflows from the command line.
+Example calls:
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
+"""
+
+"""
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
+
+'param=3Dtool=3Dname=3Dvalue'
+
+Example=20
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbwa'=20
+
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
+
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpea=
kcalling_spp=3Dwindow_size=3D1000'=20
+
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+
+def main():
+ try:
+ print("workflow_execute:py:");
+ data =3D {}
+ data['workflow_id'] =3D sys.argv[3]
+ data['history'] =3D sys.argv[4]
+ data['ds_map'] =3D {}
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ data['parameters'] =3D {};
+
+ # DBTODO If only one input is given, don't require a step
+ # mapping, just use it for everything?
+ for v in sys.argv[5:]:
+ print("Multiple arguments ");
+ print(v);
+
+ try:
+ step, src, ds_id =3D v.split('=3D');
+ data['ds_map'][step] =3D {'src':src, 'id':ds_id};
+
+ except ValueError:
+ print("VALUE ERROR:");
+ wtype, wtool, wparam, wvalue =3D v.split('=3D');
+ data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
+
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
+ #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+
+ except IndexError:
+ print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
+ sys.exit(1)
+ submit( sys.argv[1], sys.argv[2], data )
+
+if __name__ =3D=3D '__main__':
+ main()
+
diff -r edcfb659dc4fc03e4385d63e36e5df380a20c898 -r 0f3e6e68d8f53ffdaee2f37=
1cc3d21a0d6a3b3aa scripts/api/workflow_execute_rpark.py
--- a/scripts/api/workflow_execute_rpark.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-"""
-Execute workflows from the command line.
-Example calls:
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
-"""
-
-"""
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
-
-'param=3Dtool=3Dname=3Dvalue'
-
-Example=20
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbwa'=20
-
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
-
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakca=
lling_spp=3Dwindow_size=3D1000'=20
-
-"""
-
-import os, sys
-sys.path.insert( 0, os.path.dirname( __file__ ) )
-from common import submit
-
-
-def main():
- try:
- print("workflow_execute:py:");
- data =3D {}
- data['workflow_id'] =3D sys.argv[3]
- data['history'] =3D sys.argv[4]
- data['ds_map'] =3D {}
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- data['parameters'] =3D {};
-
- # DBTODO If only one input is given, don't require a step
- # mapping, just use it for everything?
- for v in sys.argv[5:]:
- print("Multiple arguments ");
- print(v);
-
- try:
- step, src, ds_id =3D v.split('=3D');
- data['ds_map'][step] =3D {'src':src, 'id':ds_id};
-
- except ValueError:
- print("VALUE ERROR:");
- wtype, wtool, wparam, wvalue =3D v.split('=3D');
- data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
-
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
- #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
-
- except IndexError:
- print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
- sys.exit(1)
- submit( sys.argv[1], sys.argv[2], data )
-
-if __name__ =3D=3D '__main__':
- main()
-
https://bitbucket.org/galaxy/galaxy-central/changeset/414b1b3fb029/
changeset: 414b1b3fb029
user: rpark37
date: 2012-04-11 05:05:23
summary: Cleaned up code for api/workflows.py
affected #: 1 file
diff -r 0f3e6e68d8f53ffdaee2f371cc3d21a0d6a3b3aa -r 414b1b3fb029a1acdd925b5=
bef41d2a38415c695 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -104,10 +104,7 @@
### RPARK: dictionary containing which workflows to change and edi=
t ###
param_map =3D {};
if (payload.has_key('parameters') ):
- #if (payload['parameters']):
param_map =3D payload['parameters'];
- print("PARAMETER MAP:");
- print(param_map);
# ----------------------------------------------------------------=
--------------- # =20
=20
=20
@@ -200,24 +197,11 @@
=20
####################################################
####################################################
- #print("CHECKING WORKFLOW STEPS:")
- #print(step.tool_id);
- #print(step.state.inputs);
- #print("upgard messages");
- #print(step.state);
- #print("\n");
# RPARK: IF TOOL_NAME IN PARAMETER MAP #
if step.tool_id in param_map:
- #print("-------------------------FOUND IN PARAMETER DI=
CTIONARY")
- #print(param_map[step.tool_id]);
change_param =3D param_map[step.tool_id]['param'];
change_value =3D param_map[step.tool_id]['value'];
- #step.state.inputs['refGenomeSource']['index'] =3D "cr=
apolo";
- #print(step.state.inputs[change_param]);
step.state.inputs[change_param] =3D change_value;
- #print(step.state.inputs[change_param]);
- #print(param_map[step.tool_id][change_value]);
- #print("----------------------------------------------=
----")
####################################################
####################################################
=20
https://bitbucket.org/galaxy/galaxy-central/changeset/8c33567aeca5/
changeset: 8c33567aeca5
user: rpark37
date: 2012-08-03 08:09:05
summary: Cleaned up codebase in an attempt to add API additions to the=
 galaxy main branch
affected #: 3 files
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_delete.py
--- /dev/null
+++ b/scripts/api/workflow_delete.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+API example script for deleting workflows=20
+# ---------------------------------------------- #
+
+Example calls:
+python workflow_delete.py <api_key><galaxy_url>/api/workflows/<workflow id=
> True
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import delete
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
+ sys.exit( 1 )
+try:
+ data =3D {}
+ data[ 'purge' ] =3D sys.argv[3]
+except IndexError:
+ pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_delete_workflow_rpark.py
--- a/scripts/api/workflow_delete_workflow_rpark.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-"""
-# Author: RPARK
-API script for deleting workflows=20
-"""
-
-import os, sys
-sys.path.insert( 0, os.path.dirname( __file__ ) )
-from common import delete
-
-try:
- assert sys.argv[2]
-except IndexError:
- print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
- sys.exit( 1 )
-try:
- data =3D {}
- data[ 'purge' ] =3D sys.argv[3]
-except IndexError:
- pass
-
-delete( sys.argv[1], sys.argv[2], data )
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_execute_parameters.py
--- a/scripts/api/workflow_execute_parameters.py
+++ b/scripts/api/workflow_execute_parameters.py
@@ -1,23 +1,13 @@
#!/usr/bin/env python
"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+# ---------------------------------------------- #
+
Execute workflows from the command line.
Example calls:
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
-"""
-
-"""
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
-
-'param=3Dtool=3Dname=3Dvalue'
-
-Example=20
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbwa'=20
-
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
-
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpea=
kcalling_spp=3Dwindow_size=3D1000'=20
-
+python workflow_execute.py <api_key><galaxy_url>/api/workflows <workflow_i=
d> 'hist_id=3D<history_id>' '38=3Dhda=3D<file_id>' 'param=3Dtool=3Dname=3Dv=
alue'=20
+python workflow_execute_parameters.py <api_key> http://localhost:8080/api/=
workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d38679e985db' '70=3Dld=
=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=3Dbowtie' 'param=3D=
bowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakcalling_spp=3Dwindow_s=
ize=3D1000'=20
"""
=20
import os, sys
@@ -34,7 +24,6 @@
data['ds_map'] =3D {}
=20
#########################################################
- ### MY EDITS ############################################
### Trying to pass in parameter for my own dictionary ###
data['parameters'] =3D {};
=20
@@ -51,14 +40,10 @@
except ValueError:
print("VALUE ERROR:");
wtype, wtool, wparam, wvalue =3D v.split('=3D');
- data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
-
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
- #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+ try:
+ data['parameters'][wtool] =3D {'param':wparam, 'value'=
:wvalue}
+ except ValueError:
+ print("TOOL ID ERROR:");
=20
except IndexError:
print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
https://bitbucket.org/galaxy/galaxy-central/changeset/6a644558eed7/
changeset: 6a644558eed7
user: dannon
date: 2012-08-29 16:17:53
summary: Merge of rpark's workflow API changes to support parameter exe=
cution and workflow creation.
TODO: Refactor copied workflow methods back out.
affected #: 2 files
diff -r 8c33567aeca58c4e7306d3beef590b82cff90be2 -r 6a644558eed7aba74a74060=
ccf0aa4ec211809c2 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -10,25 +10,12 @@
from galaxy.web.base.controller import BaseAPIController, url_for
from galaxy.workflow.modules import module_factory
from galaxy.jobs.actions.post import ActionBox
-
-# ------------------------------------------------------------------------=
---------------------- #
-# ------------------------------------------------------------------------=
---------------------- #
-# ---- RPARK EDITS ---- #
-import pkg_resources
-pkg_resources.require( "simplejson" )
-from galaxy import model
+from galaxy.model.item_attrs import UsesAnnotations
from galaxy.web.controllers.workflow import attach_ordered_steps
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.workflow.modules import *
-from galaxy.model.item_attrs import *
-
-# ------------------------------------------------------------------------=
---------------------- #
-# ------------------------------------------------------------------------=
---------------------- #=20
-
=20
log =3D logging.getLogger(__name__)
=20
-class WorkflowsAPIController(BaseAPIController):
+class WorkflowsAPIController(BaseAPIController, UsesAnnotations):
@web.expose_api
def index(self, trans, **kwd):
"""
@@ -100,16 +87,16 @@
However, we will import them if installed_repository_file is speci=
fied
"""
=20
- # ----------------------------------------------------------------=
--------------- # =20
+ # ----------------------------------------------------------------=
--------------- #
### RPARK: dictionary containing which workflows to change and edi=
t ###
param_map =3D {};
if (payload.has_key('parameters') ):
param_map =3D payload['parameters'];
- # ----------------------------------------------------------------=
--------------- # =20
- =20
+ # ----------------------------------------------------------------=
--------------- #
=20
- =20
- =20
+
+
+
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
@@ -194,7 +181,7 @@
# are not persisted so we need to do it every time)
step.module.add_dummy_datasets( connections=3Dstep.input_c=
onnections )
step.state =3D step.module.state
- =20
+
####################################################
####################################################
# RPARK: IF TOOL_NAME IN PARAMETER MAP #
@@ -204,7 +191,7 @@
step.state.inputs[change_param] =3D change_value;
####################################################
####################################################
- =20
+
if step.tool_errors:
trans.response.status =3D 400
return "Workflow cannot be run because of validation e=
rrors in some steps: %s" % step_errors
@@ -267,40 +254,40 @@
def workflow_dict( self, trans, workflow_id, **kwd ):
"""
GET /api/workflows/{encoded_workflow_id}/download
- Returns a selected workflow as a json dictionary.=20
+ Returns a selected workflow as a json dictionary.
"""
- =20
+
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
except Exception,e:
return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
- =20
- # check to see if user has permissions to selected workflow=20
+
+ # check to see if user has permissions to selected workflow
if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
trans.response.status =3D 400
return("Workflow is not owned by or shared with current us=
er")
- =20
+
ret_dict =3D self._workflow_to_dict( trans, stored_workflow );
return ret_dict
- =20
+
@web.expose_api
- def delete( self, trans, id, **kwd ): =20
+ def delete( self, trans, id, **kwd ):
"""
DELETE /api/workflows/{encoded_workflow_id}
Deletes a specified workflow
Author: rpark
- =20
+
copied from galaxy.web.controllers.workflows.py (delete)
"""
workflow_id =3D id;
- =20
+
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
except Exception,e:
return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
- =20
- # check to see if user has permissions to selected workflow=20
+
+ # check to see if user has permissions to selected workflow
if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
trans.response.status =3D 400
@@ -309,42 +296,37 @@
#Mark a workflow as deleted
stored_workflow.deleted =3D True
trans.sa_session.flush()
- =20
- # Python Debugger
- #import pdb; pdb.set_trace()
- =20
+
# TODO: Unsure of response message to let api know that a workflow=
was successfully deleted
#return 'OK'
return ( "Workflow '%s' successfully deleted" % stored_workflow.na=
me )
- =20
+
@web.expose_api
def import_new_workflow(self, trans, payload, **kwd):
"""
POST /api/workflows/upload
Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
- Author: rpark=20
- =20
+ Author: rpark
+
# currently assumes payload['workflow'] is a json representation o=
f a workflow to be inserted into the database
"""
- =20
- #import pdb; pdb.set_trace()
- =20
+
data =3D payload['workflow'];
workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
- =20
- # galaxy workflow newly created id =20
+
+ # galaxy workflow newly created id
workflow_id =3D workflow.id;
- # api encoded, id=20
+ # api encoded, id
encoded_id =3D trans.security.encode_id(workflow_id);
- =20
+
# return list
rval=3D [];
- =20
+
item =3D workflow.get_api_value(value_mapper=3D{'id':trans.securit=
y.encode_id})
item['url'] =3D url_for('workflow', id=3Dencoded_id)
- =20
- rval.append(item); =20
- =20
+
+ rval.append(item);
+
return item;
=20
def _workflow_from_dict( self, trans, data, source=3DNone ):
@@ -392,21 +374,19 @@
workflow.has_errors =3D True
# Stick this in the step temporarily
step.temp_input_connections =3D step_dict['input_connections']
- =20
# Save step annotation.
annotation =3D step_dict[ 'annotation' ]
- if annotation:
- annotation =3D sanitize_html( annotation, 'utf-8', 'text/h=
tml' )
+ #if annotation:
+ #annotation =3D sanitize_html( annotation, 'utf-8', 'text/=
html' )
# ------------------------------------------ #
# RPARK REMOVING: user annotation b/c of API
#self.add_item_annotation( trans.sa_session, trans.get_use=
r(), step, annotation )
# ------------------------------------------ #
- =20
# Unpack and add post-job actions.
post_job_actions =3D step_dict.get( 'post_job_actions', {} )
for name, pja_dict in post_job_actions.items():
- pja =3D PostJobAction( pja_dict[ 'action_type' ],=20
- step, pja_dict[ 'output_name' ],=20
+ pja =3D PostJobAction( pja_dict[ 'action_type' ],
+ step, pja_dict[ 'output_name' ],
pja_dict[ 'action_arguments' ] )
# Second pass to deal with connections between steps
for step in steps:
@@ -431,14 +411,14 @@
trans.sa_session.add( stored )
trans.sa_session.flush()
return stored, missing_tool_tups
- =20
+
def _workflow_to_dict( self, trans, stored ):
"""
RPARK: copied from galaxy.web.controllers.workflows.py
Converts a workflow to a dict of attributes suitable for exporting.
"""
workflow =3D stored.latest_workflow
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
workflow_annotation =3D self.get_item_annotation_obj( trans.sa_ses=
sion, trans.user, stored )
@@ -446,8 +426,8 @@
if workflow_annotation:
annotation_str =3D workflow_annotation.annotation
### ----------------------------------- ###
- =20
- =20
+
+
# Pack workflow data into a dictionary and return
data =3D {}
data['a_galaxy_workflow'] =3D 'true' # Placeholder for identifying=
galaxy workflow
@@ -457,22 +437,22 @@
## RPARK EDIT ##
data['annotation'] =3D annotation_str
### ----------------------------------- ###
- =20
+
data['steps'] =3D {}
# For each step, rebuild the form and encode the state
for step in workflow.steps:
# Load from database representation
module =3D module_factory.from_workflow_step( trans, step )
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
# Get user annotation.
step_annotation =3D self.get_item_annotation_obj(trans.sa_sess=
ion, trans.user, step )
annotation_str =3D ""
if step_annotation:
- annotation_str =3D step_annotation.annotation =20
+ annotation_str =3D step_annotation.annotation
### ----------------------------------- ###
- =20
+
# Step info
step_dict =3D {
'id': step.order_index,
@@ -484,18 +464,18 @@
'tool_errors': module.get_errors(),
## 'data_inputs': module.get_data_inputs(),
## 'data_outputs': module.get_data_outputs(),
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
'annotation' : annotation_str
### ----------------------------------- ###
- =20
+
}
# Add post-job actions to step dict.
if module.type =3D=3D 'tool':
pja_dict =3D {}
for pja in step.post_job_actions:
- pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,=20
+ pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,
outp=
ut_name =3D pja.output_name,
acti=
on_arguments =3D pja.action_arguments )
step_dict[ 'post_job_actions' ] =3D pja_dict
@@ -559,37 +539,4 @@
# Add to return value
data['steps'][step.order_index] =3D step_dict
return data
- =20
- def get_item_annotation_obj( self, db_session, user, item ):
- """=20
- RPARK: copied from galaxy.model.item_attr.py
- Returns a user's annotation object for an item. """
- # Get annotation association class.
- annotation_assoc_class =3D self._get_annotation_assoc_class( item )
- if not annotation_assoc_class:
- return None
- =20
- # Get annotation association object.
- annotation_assoc =3D db_session.query( annotation_assoc_class ).fi=
lter_by( user=3Duser )
- =20
- # TODO: use filtering like that in _get_item_id_filter_str()
- if item.__class__ =3D=3D galaxy.model.History:
- annotation_assoc =3D annotation_assoc.filter_by( history=3Dite=
m )
- elif item.__class__ =3D=3D galaxy.model.HistoryDatasetAssociation:
- annotation_assoc =3D annotation_assoc.filter_by( hda=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.StoredWorkflow:
- annotation_assoc =3D annotation_assoc.filter_by( stored_workfl=
ow=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.WorkflowStep:
- annotation_assoc =3D annotation_assoc.filter_by( workflow_step=
=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.Page:
- annotation_assoc =3D annotation_assoc.filter_by( page=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.Visualization:
- annotation_assoc =3D annotation_assoc.filter_by( visualization=
=3Ditem )
- return annotation_assoc.first()
- =20
- def _get_annotation_assoc_class( self, item ):
- """=20
- RPARK: copied from galaxy.model.item_attr.py
- Returns an item's item-annotation association class. """
- class_name =3D '%sAnnotationAssociation' % item.__class__.__name__
- return getattr( galaxy.model, class_name, None )
+
diff -r 8c33567aeca58c4e7306d3beef590b82cff90be2 -r 6a644558eed7aba74a74060=
ccf0aa4ec211809c2 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -151,20 +151,11 @@
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_=
prefix=3D'/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_i=
d}/library/{library_id}', controller=3D'workflows', action=3D'run', workflo=
w_id=3DNone, library_id=3DNone, conditions=3Ddict(method=3D["GET"]) )
=20
- # ---------------------------------------------- #
- # ---------------------------------------------- #
- # RPARK EDIT=20
- =20
- # How to extend API: url_mapping=20
- # "POST /api/workflows/import" =3D> ``workflows.import_workflow(=
)``.
- # Defines a named route "import_workflow".
- webapp.api_mapper.connect("import_workflow", "/api/workflows/uploa=
d", controller=3D"workflows", action=3D"import_new_workflow", conditions=3D=
dict(method=3D["POST"]))
- webapp.api_mapper.connect("workflow_dict", '/api/workflows/downloa=
d/{workflow_id}', controller=3D'workflows', action=3D'workflow_dict', condi=
tions=3Ddict(method=3D['GET']))
- =20
- #import pdb; pdb.set_trace() =20
- # ---------------------------------------------- #
- # ---------------------------------------------- #
- =20
+ # "POST /api/workflows/import" =3D> ``workflows.import_workflow()``.
+ # Defines a named route "import_workflow".
+ webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", =
controller=3D"workflows", action=3D"import_new_workflow", conditions=3Ddict=
(method=3D["POST"]))
+ webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{w=
orkflow_id}', controller=3D'workflows', action=3D'workflow_dict', condition=
s=3Ddict(method=3D['GET']))
+
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
https://bitbucket.org/galaxy/galaxy-central/changeset/07045f489517/
changeset: 07045f489517
user: dannon
date: 2012-08-29 16:18:23
summary: Merge.
affected #: 36 files
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -719,7 +719,49 @@
pass
=20
class Newick( Text ):
- pass
+ """New Hampshire/Newick Format"""
+ file_ext =3D "nhx"
+
+ MetadataElement( name=3D"columns", default=3D3, desc=3D"Number of colu=
mns", readonly=3DTrue )
+
+ def __init__(self, **kwd):
+ """Initialize foobar datatype"""
+ Text.__init__(self, **kwd)
+
+ def init_meta( self, dataset, copy_from=3DNone ):
+ Text.init_meta( self, dataset, copy_from=3Dcopy_from )
+
+
+ def sniff( self, filename ):
+ """ Returning false as the newick format is too general and cannot=
be sniffed."""
+ return False
+
+
+class Nexus( Text ):
+ """Nexus format as used By Paup, Mr Bayes, etc"""
+ file_ext =3D "nex"
+
+ MetadataElement( name=3D"columns", default=3D3, desc=3D"Number of colu=
mns", readonly=3DTrue )
+
+ def __init__(self, **kwd):
+ """Initialize foobar datatype"""
+ Text.__init__(self, **kwd)
+
+ def init_meta( self, dataset, copy_from=3DNone ):
+ Text.init_meta( self, dataset, copy_from=3Dcopy_from )
+
+
+ def sniff( self, filename ):
+ """All Nexus Files Simply puts a '#NEXUS' in its first line"""
+ f =3D open(filename, "r")
+ firstline =3D f.readline().upper()
+ f.close()
+
+ if "#NEXUS" in firstline:
+ return True
+ else:
+ return False
+
=20
# ------------- Utility methods --------------
=20
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -6,6 +6,7 @@
from galaxy import util
from galaxy.datatypes.checkers import *
from galaxy.datatypes.binary import unsniffable_binary_formats
+from encodings import search_function as encodings_search_function
=20
log =3D logging.getLogger(__name__)
=20
@@ -15,7 +16,7 @@
full_path =3D os.path.join(path, 'test', fname)
return full_path
=20
-def stream_to_open_named_file( stream, fd, filename ):
+def stream_to_open_named_file( stream, fd, filename, source_encoding=3DNon=
e, source_error=3D'strict', target_encoding=3DNone, target_error=3D'strict'=
):
"""Writes a stream to the provided file descriptor, returns the file's=
name and bool( is_multi_byte ). Closes file descriptor"""
#signature and behavor is somewhat odd, due to backwards compatibility=
, but this can/should be done better
CHUNK_SIZE =3D 1048576
@@ -23,6 +24,10 @@
is_compressed =3D False
is_binary =3D False
is_multi_byte =3D False
+ if not target_encoding or not encodings_search_function( target_encodi=
ng ):
+ target_encoding =3D util.DEFAULT_ENCODING #utf-8
+ if not source_encoding:
+ source_encoding =3D util.DEFAULT_ENCODING #sys.getdefaultencoding(=
) would mimic old behavior (defaults to ascii)
while 1:
chunk =3D stream.read( CHUNK_SIZE )
if not chunk:
@@ -42,13 +47,12 @@
chars =3D chunk[:100]
is_multi_byte =3D util.is_multi_byte( chars )
if not is_multi_byte:
- for char in chars:
- if ord( char ) > 128:
- is_binary =3D True
- break
+ is_binary =3D util.is_binary( chunk )
data_checked =3D True
if not is_compressed and not is_binary:
- os.write( fd, chunk.encode( "utf-8" ) )
+ if not isinstance( chunk, unicode ):
+ chunk =3D chunk.decode( source_encoding, source_error )
+ os.write( fd, chunk.encode( target_encoding, target_error ) )
else:
# Compressed files must be encoded after they are uncompressed=
in the upload utility,
# while binary files should not be encoded at all.
@@ -56,10 +60,10 @@
os.close( fd )
return filename, is_multi_byte
=20
-def stream_to_file( stream, suffix=3D'', prefix=3D'', dir=3DNone, text=3DF=
alse ):
+def stream_to_file( stream, suffix=3D'', prefix=3D'', dir=3DNone, text=3DF=
alse, **kwd ):
"""Writes a stream to a temporary file, returns the temporary file's n=
ame"""
fd, temp_name =3D tempfile.mkstemp( suffix=3Dsuffix, prefix=3Dprefix, =
dir=3Ddir, text=3Dtext )
- return stream_to_open_named_file( stream, fd, temp_name )
+ return stream_to_open_named_file( stream, fd, temp_name, **kwd )
=20
def check_newlines( fname, bytes_to_read=3D52428800 ):
"""
@@ -305,14 +309,9 @@
else:
for hdr in headers:
for char in hdr:
- if len( char ) > 1:
- for c in char:
- if ord( c ) > 128:
- is_binary =3D True
- break
- elif ord( char ) > 128:
- is_binary =3D True
- break
+ #old behavior had 'char' possibly having length > 1,
+ #need to determine when/if this occurs=20
+ is_binary =3D util.is_binary( char )
if is_binary:
break
if is_binary:
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -76,3 +76,24 @@
dataset.blurb =3D 'file purged from disk'
def sniff( self, filename ):
return False
+
+class Phyloxml( GenericXml ):
+ """Format for defining phyloxml data http://www.phyloxml.org/"""
+ file_ext =3D "phyloxml"
+ def set_peek( self, dataset, is_multi_byte=3DFalse ):
+ """Set the peek and blurb text"""
+ if not dataset.dataset.purged:
+ dataset.peek =3D data.get_file_peek( dataset.file_name, is_mul=
ti_byte=3Dis_multi_byte )
+ dataset.blurb =3D 'Phyloxml data'
+ else:
+ dataset.peek =3D 'file does not exist'
+ dataset.blurb =3D 'file purged from disk'
+
+ def sniff( self, filename ):
+ """"Checking for keyword - 'phyloxml' always in lowercase in the f=
irst few lines"""
+ f =3D open(filename, "r")
+ firstlines =3D "".join(f.readlines(5))
+ f.close()
+ if "phyloxml" in firstlines:
+ return True
+ return False
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -471,7 +471,7 @@
job.user.total_disk_usage +=3D bytes
=20
# fix permissions
- for path in [ dp.real_path for dp in self.get_output_fnames() ]:
+ for path in [ dp.real_path for dp in self.get_mutable_output_fname=
s() ]:
util.umask_fix_perms( path, self.app.config.umask, 0666, self.=
app.config.gid )
self.sa_session.flush()
log.debug( 'job %d ended' % self.job_id )
@@ -679,6 +679,11 @@
self.compute_outputs()
return self.output_paths
=20
+ def get_mutable_output_fnames( self ):
+ if self.output_paths is None:
+ self.compute_outputs()
+ return filter( lambda dsp: dsp.mutable, self.output_paths )
+
def get_output_hdas_and_fnames( self ):
if self.output_hdas_and_paths is None:
self.compute_outputs()
@@ -686,10 +691,11 @@
=20
def compute_outputs( self ) :
class DatasetPath( object ):
- def __init__( self, dataset_id, real_path, false_path =3D None=
):
+ def __init__( self, dataset_id, real_path, false_path =3D None=
, mutable =3D True ):
self.dataset_id =3D dataset_id
self.real_path =3D real_path
self.false_path =3D false_path
+ self.mutable =3D mutable
def __str__( self ):
if self.false_path is None:
return self.real_path
@@ -706,13 +712,13 @@
self.output_hdas_and_paths =3D {}
for name, hda in [ ( da.name, da.dataset ) for da in job.outpu=
t_datasets + job.output_library_datasets ]:
false_path =3D os.path.abspath( os.path.join( self.working=
_directory, "galaxy_dataset_%d.dat" % hda.dataset.id ) )
- dsp =3D DatasetPath( hda.dataset.id, hda.dataset.file_name=
, false_path )
+ dsp =3D DatasetPath( hda.dataset.id, hda.dataset.file_name=
, false_path, mutable =3D hda.dataset.external_filename is None )
self.output_paths.append( dsp )
self.output_hdas_and_paths[name] =3D hda, dsp
if special:
false_path =3D os.path.abspath( os.path.join( self.working=
_directory, "galaxy_dataset_%d.dat" % special.dataset.id ) )
else:
- results =3D [ ( da.name, da.dataset, DatasetPath( da.dataset.d=
ataset.id, da.dataset.file_name ) ) for da in job.output_datasets + job.out=
put_library_datasets ]
+ results =3D [ ( da.name, da.dataset, DatasetPath( da.dataset.d=
ataset.id, da.dataset.file_name, mutable =3D da.dataset.dataset.external_fi=
lename is None ) ) for da in job.output_datasets + job.output_library_datas=
ets ]
self.output_paths =3D [t[2] for t in results]
self.output_hdas_and_paths =3D dict([(t[0], t[1:]) for t in r=
esults])
if special:
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -115,15 +115,16 @@
files =3D tar.getmembers()
for filename in files:
z =3D tar.extractfile(filename)
- try:
- chunk =3D z.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- log.error( 'Problem decompressing compressed data'=
)
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
+ while 1:
+ try:
+ chunk =3D z.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ log.error( 'Problem decompressing compressed d=
ata' )
+ exit()
+ if not chunk:
+ break
+ os.write( fd, chunk )
os.write( fd, '\n' )
os.close( fd )
tar.close()
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/model/item_attrs.py
--- a/lib/galaxy/model/item_attrs.py
+++ b/lib/galaxy/model/item_attrs.py
@@ -95,7 +95,7 @@
""" Returns a user's annotation string for an item. """
annotation_obj =3D self.get_item_annotation_obj( db_session, user,=
item )
if annotation_obj:
- return annotation_obj.annotation
+ return galaxy.util.unicodify( annotation_obj.annotation )
return None
=20
def get_item_annotation_obj( self, db_session, user, item ):
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -187,7 +187,9 @@
section.elems[ section_key ] =3D workflow
log.debug( "Loaded workflow: %s %s" % ( workfl=
ow_id, workflow.name ) )
elif section_key.startswith( 'label_' ):
- section.elems[ section_key ] =3D section_val
+ if section_val:
+ section.elems[ section_key ] =3D section_val
+ log.debug( "Loaded label: %s" % ( section_val.=
text ) )
self.tool_panel[ key ] =3D section
def load_integrated_tool_panel_keys( self ):
"""
@@ -215,12 +217,12 @@
section.elems[ key ] =3D None
elif section_elem.tag =3D=3D 'label':
key =3D 'label_%s' % section_elem.get( 'id' )
- section.elems[ key ] =3D ToolSectionLabel( section=
_elem )
+ section.elems[ key ] =3D None
key =3D 'section_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] =3D section
elif elem.tag =3D=3D 'label':
key =3D 'label_%s' % elem.get( 'id' )
- self.integrated_tool_panel[ key ] =3D ToolSectionLabel( el=
em )
+ self.integrated_tool_panel[ key ] =3D None
def write_integrated_tool_panel_config_file( self ):
"""
Write the current in-memory version of the integrated_tool_panel.x=
ml file to disk. Since Galaxy administrators=20
@@ -254,10 +256,11 @@
if section_item:
os.write( fd, ' <workflow id=3D"%s" />\=
n' % section_item.id )
elif section_key.startswith( 'label_' ):
- label_id =3D section_item.id or ''
- label_text =3D section_item.text or ''
- label_version =3D section_item.version or ''
- os.write( fd, ' <label id=3D"%s" text=3D"%s=
" version=3D"%s" />\n' % ( label_id, label_text, label_version ) )
+ if section_item:
+ label_id =3D section_item.id or ''
+ label_text =3D section_item.text or ''
+ label_version =3D section_item.version or ''
+ os.write( fd, ' <label id=3D"%s" text=
=3D"%s" version=3D"%s" />\n' % ( label_id, label_text, label_version ) )
os.write( fd, ' </section>\n' )
os.write( fd, '</toolbox>\n' )
os.close( fd )
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -54,6 +54,7 @@
self._log( self.locations )
self._log( 'Indexer %s completed successfully.' % inde=
xer )
self._flush_files()
+ exit(0)
=20
def _check_link( self ):
self._log( 'Checking symlink to %s' % self.fafile )
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -34,6 +34,9 @@
=20
gzip_magic =3D '\037\213'
bz2_magic =3D 'BZh'
+DEFAULT_ENCODING =3D 'utf-8'
+NULL_CHAR =3D '\000'
+BINARY_CHARS =3D [ NULL_CHAR ]
=20
from inflection import Inflector, English
inflector =3D Inflector(English)
@@ -57,6 +60,32 @@
return True
return False
=20
+def is_binary( value, binary_chars=3DNone ):
+ """
+ File is binary if it contains a null-byte by default (e.g. behavior of=
grep, etc.).
+ This may fail for utf-16 files, but so would ASCII encoding.
+ >>> is_binary( string.printable )
+ False
+ >>> is_binary( '\\xce\\x94' )
+ False
+ >>> is_binary( '\\000' )
+ True
+ """
+ if binary_chars is None:
+ binary_chars =3D BINARY_CHARS
+ for binary_char in binary_chars:
+ if binary_char in value:
+ return True
+ return False
+
+def get_charset_from_http_headers( headers, default=3DNone ):
+ rval =3D headers.get('content-type', None )
+ if rval and 'charset=3D' in rval:
+ rval =3D rval.split('charset=3D')[-1].split(';')[0].strip()
+ if rval:
+ return rval
+ return default
+
def synchronized(func):
"""This wrapper will serialize access to 'func' to a single thread. Us=
e it as a decorator."""
def caller(*params, **kparams):
@@ -333,6 +362,17 @@
else:
return amount[0:sfs] + '0'*(len(amount) - sfs)
=20
+def unicodify( value, encoding=3DDEFAULT_ENCODING, error=3D'replace', defa=
ult=3DNone ):
+ """
+ Returns a unicode string or None
+ """
+ if isinstance( value, unicode ):
+ return value
+ try:
+ return unicode( value, encoding, error )
+ except:
+ return default
+
def object_to_string( obj ):
return binascii.hexlify( pickle.dumps( obj, 2 ) )
=20
@@ -502,7 +542,7 @@
=20
def recursively_stringify_dictionary_keys( d ):
if isinstance(d, dict):
- return dict([(k.encode('utf-8'), recursively_stringify_dictionary_=
keys(v)) for k,v in d.iteritems()])
+ return dict([(k.encode( DEFAULT_ENCODING ), recursively_stringify_=
dictionary_keys(v)) for k,v in d.iteritems()])
elif isinstance(d, list):
return [recursively_stringify_dictionary_keys(x) for x in d]
else:
@@ -622,7 +662,7 @@
Sends an email.
"""
to =3D listify( to )
- msg =3D MIMEText( body )
+ msg =3D MIMEText( body.encode( 'ascii', 'replace' ) )
msg[ 'To' ] =3D ', '.join( to )
msg[ 'From' ] =3D frm
msg[ 'Subject' ] =3D subject
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -454,7 +454,7 @@
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, reposi=
tory )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repositor=
y.name )
+ return url_join( tool_shed_url, 'repos', repository.owner, repository.=
name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed =
datatypes_config."""
tree =3D ElementTree.parse( datatypes_config )
@@ -993,7 +993,7 @@
break
return converter_path, display_path
def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
- url =3D '%s/repository/get_ctx_rev?name=3D%s&owner=3D%s&changeset_revi=
sion=3D%s&webapp=3Dgalaxy' % ( tool_shed_url, name, owner, changeset_revisi=
on )
+ url =3D url_join( tool_shed_url, 'repository/get_ctx_rev?name=3D%s&own=
er=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % ( name, owner, changeset=
_revision ) )
response =3D urllib2.urlopen( url )
ctx_rev =3D response.read()
response.close()
@@ -1221,8 +1221,8 @@
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be u=
pdated."""
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, reposi=
tory )
- url =3D '%s/repository/get_changeset_revision_and_ctx_rev?name=3D%s&ow=
ner=3D%s&changeset_revision=3D%s' % \
- ( tool_shed_url, repository.name, repository.owner, repository.ins=
talled_changeset_revision )
+ url =3D url_join( tool_shed_url, 'repository/get_changeset_revision_an=
d_ctx_rev?name=3D%s&owner=3D%s&changeset_revision=3D%s' % \
+ ( repository.name, repository.owner, repository.installed_changese=
t_revision ) )
try:
response =3D urllib2.urlopen( url )
encoded_update_dict =3D response.read()
@@ -1645,3 +1645,8 @@
tool_shed_repository.status =3D status
sa_session.add( tool_shed_repository )
sa_session.flush()
+def url_join( *args ):
+ parts =3D []
+ for arg in args:
+ parts.append( arg.strip( '/' ) )
+ return '/'.join( parts )
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/__init__.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/__init__.py
@@ -0,0 +1,1 @@
+__author__ =3D 'Tomithy'
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/baseparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/baseparser.py
@@ -0,0 +1,125 @@
+import json
+
+class Node(object):
+ """Node class of PhyloTree, which represents a CLAUDE in a phylogeneti=
c tree"""
+ def __init__(self, nodeName, **kwargs):
+ """Creates a node and adds in the typical annotations"""
+ self.name, self.id =3D nodeName, kwargs.get("id", 0)
+ self.depth =3D kwargs.get("depth", 0)
+ self.children =3D []
+
+ self.isInternal =3D kwargs.get("isInternal", 0)
+ self.length, self.bootstrap =3D kwargs.get("length", 0), kwargs.ge=
t("bootstrap", None)
+ self.events =3D kwargs.get("events", "")
+
+ # clean up boot strap values
+ if self.bootstrap =3D=3D -1:
+ self.bootstrap =3D None
+
+ def addChildNode(self, child):
+ """Adds a child node to the current node"""
+ if isinstance(child, Node):
+ self.children.append(child)
+ else:
+ self.children +=3D child
+
+
+ def __str__(self):
+ return self.name + " id:" + str(self.id) + ", depth: " + str(self.=
depth)
+
+
+ def toJson(self):
+ """Converts the data in the node to a dict representation of json"=
""
+ thisJson =3D {
+ "name" : self.name,
+ "id" : self.id,
+ "depth" : self.depth,
+ "dist" : self.length
+ }
+ thisJson =3D self.addChildrenToJson(thisJson)
+ thisJson =3D self.addMiscToJson(thisJson)
+ return thisJson
+
+ def addChildrenToJson(self, jsonDict):
+ """Needs a special method to addChildren, such that the key does n=
ot appear in the Jsondict when the children is empty
+ this requirement is due to the layout algorithm used by d3 layout =
for hiding subtree """
+ if len(self.children) > 0:
+ children =3D [ node.toJson() for node in self.children]
+ jsonDict["children"] =3D children
+ return jsonDict
+
+
+ def addMiscToJson(self, jsonDict):
+ """Adds other misc attributes to json if they are present"""
+ if not self.events =3D=3D "":
+ jsonDict["events"] =3D self.events
+ if not self.bootstrap =3D=3D None:
+ jsonDict["bootstrap"] =3D self.bootstrap
+ return jsonDict
+
+
+
+class PhyloTree(object):
+ """Standardized python based class to represent the phylogenetic tree =
parsed from different
+ phylogenetic file formats."""
+
+ def __init__(self):
+ self.root, self.rootAttr =3D None, {}
+ self.nodes =3D {}
+ self.title =3D None
+ self.id =3D 1
+
+ def addAttributesToRoot(self, attrDict):
+ """Adds attributes to root, but first we put it in a temp store an=
d bind it with root when .toJson is called"""
+ for key, value in attrDict.items():
+ self.rootAttr[key] =3D value
+
+ def makeNode(self, nodeName, **kwargs):
+ """Called to make a node within PhyloTree, arbitrary kwargs can be=
passed to annotate nodes
+ Tracks the number of nodes via internally incremented id"""
+ kwargs["id"] =3D self.id
+ self.id +=3D 1
+ return Node(nodeName, **kwargs)
+
+ def addRoot(self, root):
+ """Creates a root for phyloTree"""
+ assert isinstance(root, Node)
+ root.parent =3D None
+ self.root =3D root
+
+ def generateJsonableDict(self):
+ """Changes itself into a dictonary by recurssively calling the toj=
son on all its nodes. Think of it
+ as a dict in an array of dict in an array of dict and so on..."""
+ jsonTree =3D ""
+ if self.root:
+ assert isinstance(self.root, Node)
+ jsonTree =3D self.root.toJson()
+ for key, value in self.rootAttr.items():
+ # transfer temporary stored attr to root
+ jsonTree[key] =3D value
+ else:
+ raise Exception("Root is not assigned!")
+ return jsonTree
+
+
+
+class Base_Parser(object):
+ """Base parsers contain all the methods to handle phylogeny tree creat=
ion and
+ converting the data to json that all parsers should have"""
+
+ def __init__(self):
+ self.phyloTrees =3D []
+
+ def parseFile(self, filePath):
+ """Base method that all phylogeny file parser should have"""
+ raise Exception("Base method for phylogeny file parsers is not imp=
lemented")
+
+ def toJson(self, jsonDict):
+ """Convenience method to get a json string from a python json dict=
"""
+ return json.dumps(jsonDict)
+
+ def _writeJsonToFile(self, filepath, json):
+ """Writes the file out to the system"""
+ f =3D open(filepath, "w")
+ f.writelines(json)
+ f.close()
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/newickparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/newickparser.py
@@ -0,0 +1,185 @@
+from baseparser import Base_Parser, PhyloTree
+import re
+
+class Newick_Parser(Base_Parser):
+ """For parsing trees stored in the newick format (.nhx)
+ It is necessarily more complex because this parser is later extended b=
y Nexus for parsing newick as well.."""
+
+
+ def __init__(self):
+ super(Newick_Parser, self).__init__()
+
+
+ def parseFile(self, filePath):
+ """Parses a newick file to obtain the string inside. Returns: json=
ableDict"""
+ with open(filePath, "r") as newickFile:
+ newickString =3D newickFile.read()
+ newickString =3D newickString.replace("\n", "").replace("\r", =
"")
+ return [self.parseData(newickString)], "Success"
+
+
+ def parseData(self, newickString):
+ """To be called on a newickString directly to parse it. Returns: j=
sonableDict"""
+ return self._parseNewickToJson(newickString)
+
+
+ def _parseNewickToJson(self, newickString, treeName=3DNone, nameMap=3D=
None):
+ """parses a newick representation of a tree into a PhyloTree data =
structure,
+ which can be easily converted to json"""
+ self.phyloTree =3D PhyloTree()
+ newickString =3D self.cleanNewickString(newickString)
+ if nameMap:
+ newickString =3D self._mapName(newickString, nameMap)
+
+ self.phyloTree.root =3D self.parseNode(newickString, 0)
+ if nameMap:
+ self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+ return self.phyloTree.generateJsonableDict()
+
+
+ def cleanNewickString(self, rawNewick):
+ """removing semi colon, and illegal json characters (\,',") and wh=
ite spaces"""
+ return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+
+ def _makeNodesFromString(self, string, depth):
+ """elements separated by comma could be empty"""
+
+ if string.find("(") !=3D -1:
+ raise Exception("Tree is not well form, location: " + string)
+
+ childrenString =3D string.split(",")
+ childrenNodes =3D []
+
+ for childString in childrenString:
+ if len(childString) =3D=3D 0:
+ continue
+ nodeInfo =3D childString.split(":")
+ name, length, bootstrap =3D "", None, -1
+ if len(nodeInfo) =3D=3D 2: # has length info
+ length =3D nodeInfo[1]
+ # checking for bootstap values
+ name =3D nodeInfo[0]
+ try: # Nexus may bootstrap in names position
+ name =3D float(name)
+ if 0<=3D name <=3D 1:
+ bootstrap =3D name
+ elif 1 <=3D name <=3D 100:
+ bootstrap =3D name / 100
+ name =3D ""
+ except ValueError:
+ name =3D nodeInfo[0]
+ else:
+ name =3D nodeInfo[0] # string only contains name
+ node =3D self.phyloTree.makeNode(name, length=3Dlength, depth=
=3Ddepth, bootstrap=3D bootstrap)
+ childrenNodes +=3D [node]
+ return childrenNodes
+
+
+
+ def _mapName(self, newickString, nameMap):
+ """
+ Necessary to replace names of terms inside nexus representation
+ Also, its here because Mailaud's doesnt deal with id_strings outsi=
de of quotes(" ")
+ """
+ newString =3D ""
+ start =3D 0
+ end =3D 0
+
+ for i in xrange(len(newickString)):
+ if newickString[i] =3D=3D "(" or newickString[i] =3D=3D ",":
+ if re.match(r"[,(]", newickString[i+1:]):
+ continue
+ else:
+ end =3D i + 1
+ # i now refers to the starting position of the term to=
be replaced,
+ # we will next find j which is the ending pos of the t=
erm
+ for j in xrange(i+1, len(newickString)):
+ enclosingSymbol =3D newickString[j] # the immedi=
ate symbol after a common or left bracket which denotes the end of a term
+ if enclosingSymbol =3D=3D ")" or enclosingSymbol =
=3D=3D ":" or enclosingSymbol =3D=3D ",":
+ termToReplace =3D newickString[end:j]
+
+ newString +=3D newickString[start : end] + na=
meMap[termToReplace] #+ "'" "'" +
+ start =3D j
+ break
+
+ newString +=3D newickString[start:]
+ return newString
+
+
+ def parseNode(self, string, depth):
+ """ Recursive method for parsing newick string, works by stripping=
down the string into substring
+ of newick contained with brackers, which is used to call itself.
+ Eg ... ( A, B, (D, E)C, F, G ) ...
+ We will make the preceeding nodes first A, B, then the internal no=
de C, its children D, E,
+ and finally the succeeding nodes F, G"""
+
+ # Base case where there is only an empty string
+ if string =3D=3D "":
+ return
+ # Base case there its only an internal claude
+ if string.find("(") =3D=3D -1:
+ return self._makeNodesFromString(string, depth)
+
+ nodes, children =3D [], [] # nodes refer to the nodes on this=
level, children refers to the child of the
+ start =3D 0
+ lenOfPreceedingInternalNodeString =3D 0
+ bracketStack =3D []
+
+ for j in xrange(len(string)):
+ if string[j] =3D=3D "(": #finding the positions of all the =
open brackets
+ bracketStack.append(j)
+ continue
+ if string[j] =3D=3D ")": #finding the positions of all the =
closed brackets to extract claude
+ i =3D bracketStack.pop()
+
+ if len(bracketStack) =3D=3D 0: # is child of current node
+
+ InternalNode =3D None
+
+ #First flat call to make nodes of the same depth but f=
rom the preceeding string.
+ startSubstring =3D string[start + lenOfPreceedingInter=
nalNodeString: i]
+ preceedingNodes =3D self._makeNodesFromString(startSu=
bstring, depth)
+ nodes +=3D preceedingNodes
+
+ # Then We will try to see if the substring has any int=
ernal nodes first, make it then make nodes preceeding it and succeeding it.
+ if j + 1 < len(string):
+ stringRightOfBracket =3D string[j+1:] # Eg. '=
(b:0.4,a:0.3)c:0.3, stringRightOfBracket =3D c:0.3
+ match =3D re.search(r"[\)\,\(]", stringRightOfBrac=
ket)
+ if match:
+ indexOfNextSymbol =3D match.start()
+ stringRepOfInternalNode =3D stringRightOfBrack=
et[:indexOfNextSymbol]
+ internalNodes =3D self._makeNodesFromString( s=
tringRepOfInternalNode, depth)
+ if len(internalNodes) > 0:
+ InternalNode =3D internalNodes[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ngRepOfInternalNode)
+ else: # sometimes the node can be the last eleme=
nt of a string
+ InternalNode =3D self._makeNodesFromString(str=
ing[j+1:], depth)[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ng) - j
+ if InternalNode =3D=3D None: #creating a generic=
node if it is unnamed
+ InternalNode =3D self.phyloTree.makeNode( "", dept=
h=3Ddepth, isInternal=3DTrue ) #"internal-" + str(depth)
+ lenOfPreceedingInternalNodeString =3D 0
+
+ # recussive call to make the internal claude
+ childSubString =3D string[ i + 1 : j ]
+ InternalNode.addChildNode(self.parseNode(childSubStrin=
g, depth + 1))
+
+ nodes.append(InternalNode) # we append the internal n=
ode later to preserve order
+
+ start =3D j + 1
+ continue
+
+ if depth =3D=3D 0: # if its the root node, we do nothing about =
it and return
+ return nodes[0]
+
+ # Adding last most set of children
+ endString =3D string[start:]
+ if string[start-1] =3D=3D ")": # if the symbol belongs to an inte=
rnal node which is created previously, then we remove it from the string le=
ft to parse
+ match =3D re.search(r"[\)\,\(]", endString)
+ if match:
+ endOfNodeName =3D start + match.start() + 1
+ endString =3D string[endOfNodeName:]
+ nodes +=3D self._makeNodesFromString(endString, depth)
+
+ return nodes
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/nexusparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/nexusparser.py
@@ -0,0 +1,107 @@
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES =3D 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+ def __init__(self):
+ super(Nexus_Parser, self).__init__()
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Nexus content."""
+ return self.parseNexus(filePath)
+
+
+ def parseNexus(self, filename):
+ """ Nexus data is stored in blocks between a line starting with be=
gin and another line starting with end;
+ Commends inside square brackets are to be ignored,
+ For more information: http://wiki.christophchamp.com/index.php/NEX=
US_file_format
+ Nexus can store multiple trees
+ """
+
+ with open( filename, "rt") as nex_file:
+ nexlines =3D nex_file.readlines()
+
+ rowCount =3D 0
+ inTreeBlock =3D False # sentinel to check if we are in a t=
ree block
+ intranslateBlock =3D False # sentinel to check if we are in the=
translate region of the tree. Stores synonyms of the labellings
+ self.inCommentBlock =3D False
+ self.nameMapping =3D None # stores mapping representation us=
ed in nexus format
+ treeNames =3D []
+
+ for line in nexlines:
+ line =3D line.replace(";\n", "")
+ lline =3D line.lower()
+
+ if rowCount > MAX_READLINES or (not nex_file) :
+ break
+ rowCount +=3D1
+ # We are only interested in the tree block.
+ if "begin" in lline and "tree" in lline and not inTreeBlock:
+ inTreeBlock =3D True
+ continue
+ if inTreeBlock and "end" in lline[:3]:
+ inTreeBlock, currPhyloTree =3D False, None
+ continue
+
+ if inTreeBlock:
+
+ if "title" in lline: # Adding title to the tree
+ titleLoc =3D lline.find("title")
+ title =3D line[titleLoc + 5:].replace(" ", "")
+
+ continue
+
+ if "translate" in lline:
+ intranslateBlock =3D True
+ self.nameMapping =3D {}
+ continue
+
+ if intranslateBlock:
+ mappingLine =3D self.splitLinebyWhitespaces(line)
+ key, value =3D mappingLine[1], mappingLine[2].replace(=
",", "").replace("'","") #replacing illegal json characters
+ self.nameMapping[key] =3D value
+
+ # Extracting newick Trees
+ if "tree" in lline:
+ intranslateBlock =3D False
+
+ treeLineCols =3D self.splitLinebyWhitespaces(line)
+ treeName, newick =3D treeLineCols[2], treeLineCols[-1]
+
+ if newick =3D=3D "": # Empty lines can be found in =
tree blocks
+ continue
+
+ currPhyloTree =3D self._parseNewickToJson(newick, tree=
Name, nameMap=3Dself.nameMapping)
+
+ self.phyloTrees.append(currPhyloTree)
+ treeIndex =3D len(self.phyloTrees) - 1
+ treeNames.append( (treeName, treeIndex) ) # appendi=
ng name of tree, and its index
+ continue
+
+ return self.phyloTrees, treeNames
+
+
+ def splitLinebyWhitespaces(self, line):
+ """replace tabs and write spaces to a single write space, so we ca=
n properly split it."""
+ return re.split(r"\s+", line)
+
+
+ def checkComments(self, line):
+ """Check to see if the line/lines is a comment."""
+ if not self.inCommentBlock:
+ if "[" in line:
+ if "]" not in line:
+ self.inCommentBlock =3D True
+ else:
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ else :
+ if "]" in line:
+ if line.rfind("[") > line.rfind("]"):
+ pass # a comment block is closed but an=
other is open.
+ else:
+ self.inCommentBlock =3D False
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ return ""
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -0,0 +1,35 @@
+from newickparser import Newick_Parser
+from nexusparser import Nexus_Parser
+from phyloxmlparser import Phyloxml_Parser
+
+class Phyloviz_DataProvider(object):
+
+ def __init__(self):
+ pass
+
+ def parseFile(self, filepath, fileExt):
+ """returns [trees], meta
+ Trees are actually an array of JsonDicts. It's usually one tre=
e, except in the case of Nexus
+ """
+ jsonDicts, meta =3D [], {}
+ try:
+ if fileExt =3D=3D "nhx": # parses newick files
+ newickParser =3D Newick_Parser()
+ jsonDicts, parseMsg =3D newickParser.parseFile(filepath)
+ elif fileExt =3D=3D "phyloxml": # parses phyloXML files
+ phyloxmlParser =3D Phyloxml_Parser()
+ jsonDicts, parseMsg =3D phyloxmlParser.parseFile(filepath)
+ elif fileExt =3D=3D "nex": # parses nexus files
+ nexusParser =3D Nexus_Parser()
+ jsonDicts, parseMsg =3D nexusParser.parseFile(filepath)
+ meta["trees"] =3D parseMsg
+ else:
+ raise Exception("File type is not supported")
+
+ meta["msg"] =3D parseMsg
+
+ except Exception:
+ jsonDicts, meta["msg"] =3D [], "Parse failed"
+
+ return jsonDicts, meta
+
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -0,0 +1,134 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from xml.etree import ElementTree
+
+class Phyloxml_Parser(Base_Parser):
+ """Parses a phyloxml file into a json file that will be passed to Phyl=
oViz for display"""
+
+ def __init__(self):
+ super(Phyloxml_Parser, self).__init__()
+ self.phyloTree =3D PhyloTree()
+ self.tagsOfInterest =3D {
+ "clade": "",
+ "name" : "name",
+ "branch_length" : "length",
+ "confidence" : "bootstrap",
+ "events" : "events"
+ }
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Phylogeny Tree content."""
+ phyloXmlFile =3D open(filePath, "r")
+
+ xmlTree =3D ElementTree.parse(phyloXmlFile)
+ xmlRoot =3D xmlTree.getroot()[0]
+ self.nameSpaceIndex =3D xmlRoot.tag.rfind("}") + 1 # used later by=
the clean tag method to remove the name space in every element.tag
+
+ phyloRoot =3D None
+ for child in xmlRoot:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ phyloRoot =3D child
+ elif childTag =3D=3D "name":
+ self.phyloTree.title =3D child.text
+
+ self.phyloTree.root =3D self.parseNode(phyloRoot, 0)
+ jsonDict =3D self.phyloTree.generateJsonableDict()
+ return [jsonDict], "Success"
+
+
+ def parseNode(self, node, depth):
+ """Parses any node within a phyloxml tree and looks out for claude=
, which signals the creation of
+ nodes - internal OR leaf"""
+ assert isinstance(node, etree._Element)
+
+ tag =3D self.cleanTag(node.tag)
+ if not tag =3D=3D "clade":
+ return None
+ hasInnerClade =3D False
+
+ # peeking once for parent and once for child to check if the node =
is internal
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ hasInnerClade =3D True
+ break
+
+ if hasInnerClade: # this node is an internal node
+ currentNode =3D self._makeInternalNode(node, depth=3D depth)
+ for child in node:
+ child =3D self.parseNode(child, depth + 1)
+ if isinstance(child, Node):
+ currentNode.addChildNode(child)
+
+ else: # this node is a leaf node
+ currentNode =3D self._makeLeafNode(node, depth=3Ddepth+1)
+
+ return currentNode
+
+
+ def _makeLeafNode(self, leafNode, depth =3D 0 ):
+ """Makes leaf nodes by calling Phylotree methods"""
+ node =3D {}
+ for child in leafNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in self.tagsOfInterest:
+ key =3D self.tagsOfInterest[childTag] # need to map phy=
loxml terms to ours
+ node[key] =3D child.text
+
+ node["depth"] =3D depth
+ return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+ def _getNodeName(self, node, depth=3D-1):
+ """Gets the name of a claude. It handles the case where a taxonomy=
node is involved"""
+
+ def getTagFromTaxonomyNode(node):
+ """Returns the name of a taxonomy node. A taxonomy node have t=
o be treated differently as the name
+ is embedded one level deeper"""
+ phyloxmlTaxoNames =3D {
+ "common_name" : "",
+ "scientific_name" : "",
+ "code" : ""
+ }
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in phyloxmlTaxoNames:
+ return child.text
+ return ""
+
+ nodeName =3D ""
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "name" :
+ nodeName =3D child.text
+ break
+ elif childTag =3D=3D "taxonomy":
+ nodeName =3D getTagFromTaxonomyNode(child)
+ break
+
+ return nodeName
+
+
+ def _makeInternalNode(self, internalNode, depth=3D0):
+ """ Makes an internal node from an element object that is gurantee=
d to be a parent node.
+ Gets the value of interests like events and appends it to a custom=
node object that will be passed to PhyloTree to make nodes
+ """
+ node =3D {}
+ for child in internalNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ continue
+ elif childTag in self.tagsOfInterest:
+ if childTag =3D=3D "events": # events is nested 1 more =
level deeper than others
+ key, text =3D "events", self.cleanTag(child[0].tag)
+ else:
+ key =3D self.tagsOfInterest[childTag]
+ text =3D child.text
+ node[key] =3D text
+
+
+ return self.phyloTree.makeNode(self._getNodeName(internalNode, dep=
th), **node)
+
+
+ def cleanTag(self, tagString):
+ return tagString[self.nameSpaceIndex:]
+ =20
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -374,7 +374,7 @@
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/browse_valid_categories?galaxy_url=3D%s&weba=
pp=3Dgalaxy' % ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/browse_valid_categori=
es?galaxy_url=3D%s&webapp=3Dgalaxy' % ( galaxy_url ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -392,8 +392,9 @@
# Send a request to the relevant tool shed to see if there are any=
updates.
repository =3D get_repository( trans, kwd[ 'id' ] )
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- url =3D '%s/repository/check_for_updates?galaxy_url=3D%s&name=3D%s=
&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, url_for( '/', qualified=3DTrue ), repository.=
name, repository.owner, repository.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/check_for_updates?galaxy_url=3D%s&name=
=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( url_for( '/', qualified=3DTrue ), repository.nam=
e, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -467,14 +468,14 @@
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/find_tools?galaxy_url=3D%s&webapp=3Dgalaxy' =
% ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/find_tools?galaxy_url=
=3D%s&webapp=3Dgalaxy' % galaxy_url )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
def find_workflows_in_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/find_workflows?galaxy_url=3D%s&webapp=3Dgala=
xy' % ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/find_workflows?galaxy=
_url=3D%s&webapp=3Dgalaxy' % galaxy_url )
return trans.response.send_redirect( url )
def generate_tool_path( self, repository_clone_url, changeset_revision=
):
"""
@@ -489,7 +490,7 @@
tool_shed_url =3D items[ 0 ]
repo_path =3D items[ 1 ]
tool_shed_url =3D clean_tool_shed_url( tool_shed_url )
- return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_rev=
ision )
+ return url_join( tool_shed_url, 'repos', repo_path, changeset_revi=
sion )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -634,8 +635,9 @@
tool_shed_repository,
trans.model.ToolShedRe=
pository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url =3D get_url_from_repository_tool_shed( trans=
.app, tool_shed_repository )
- url =3D '%s/repository/get_tool_versions?name=3D%s&owner=
=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, tool_shed_repository.name, tool_shed_=
repository.owner, tool_shed_repository.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=3D%s&o=
wner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( tool_shed_repository.name, tool_shed_rep=
ository.owner, tool_shed_repository.changeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -954,7 +956,9 @@
repository_ids =3D kwd.get( 'repository_ids', None )
changeset_revisions =3D kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url =3D '%srepository/get_repository_information?repository_id=
s=3D%s&changeset_revisions=3D%s&webapp=3Dgalaxy' % ( tool_shed_url, reposit=
ory_ids, changeset_revisions )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_repository_information?reposit=
ory_ids=3D%s&changeset_revisions=3D%s&webapp=3Dgalaxy' % \
+ ( repository_ids, changeset_revisions ) )
response =3D urllib2.urlopen( url )
raw_text =3D response.read()
response.close()
@@ -1097,8 +1101,9 @@
name =3D repo_info_dict.keys()[ 0 ]
repo_info_tuple =3D repo_info_dict[ name ]
description, repository_clone_url, changeset_revision, ctx_rev=
, repository_owner, tool_dependencies =3D repo_info_tuple
- url =3D '%srepository/get_readme?name=3D%s&owner=3D%s&changese=
t_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, name, repository_owner, changeset_revisio=
n )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_readme?name=3D%s&owner=3D%s&ch=
angeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( name, repository_owner, changeset_revision )=
)
response =3D urllib2.urlopen( url )
raw_text =3D response.read()
response.close()
@@ -1273,8 +1278,9 @@
tool_shed =3D get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the=
repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url =3D '%s/repository/previous_changeset_revisions?galaxy_url=3D%=
s&name=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, url_for( '/', qualified=3DTrue ), repository_=
name, repository_owner, changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_ur=
l=3D%s&name=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( url_for( '/', qualified=3DTrue ), repository_nam=
e, repository_owner, changeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -1350,8 +1356,9 @@
# Get the tool_versions from the tool shed for each tool in the in=
stalled change set.
repository =3D get_repository( trans, kwd[ 'id' ] )
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- url =3D '%s/repository/get_tool_versions?name=3D%s&owner=3D%s&chan=
geset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, repository.name, repository.owner, repository=
.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_tool_versions?name=3D%s&owner=3D%s=
&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( repository.name, repository.owner, repository.ch=
angeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -1522,7 +1529,7 @@
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repos=
itory.name )
+ return url_join( tool_shed_url, 'repos', repository.owner, reposit=
ory.name )
=20
## ---- Utility methods --------------------------------------------------=
-----
=20
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -148,7 +148,8 @@
dbkey =3D build[0]
longname =3D build[1]
break =20
- assert dbkey is not '?', 'That build was not found'
+ if dbkey =3D=3D '?':
+ return trans.fill_template( '/admin/data_admin/generic_err=
or.mako', message=3D'An invalid build was specified.' )
ftp =3D ftplib.FTP('hgdownload.cse.ucsc.edu')
ftp.login('anonymous', trans.get_user().email)
checker =3D []
@@ -189,7 +190,8 @@
dbkeys=3Dtrans.ucsc_builds )
elif source =3D=3D 'Ensembl':
dbkey =3D params.get( 'ensembl_dbkey', None )
- assert dbkey is not '?', 'That build was not found'
+ if dbkey =3D=3D '?':
+ return trans.fill_template( '/admin/data_admin/generic_err=
or.mako', message=3D'An invalid build was specified.' )
for build in trans.ensembl_builds:
if build[ 'dbkey' ] =3D=3D dbkey:
dbkey =3D build[ 'dbkey' ]
@@ -199,7 +201,7 @@
break
url =3D 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.=
%s.%s.dna.toplevel.fa.gz' % ( release, pathname.lower(), pathname, dbkey, r=
elease )
else:
- return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'Somehow an invalid data source was specified.' )
+ return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'An invalid data source was specified.' )
if url is None:
return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'Unable to generate a valid URL with the specified paramete=
rs.' )
params =3D dict( protocol=3D'http', name=3Ddbkey, datatype=3D'fast=
a', url=3Durl, user=3Dtrans.user.id )
@@ -248,7 +250,8 @@
sa =3D trans.app.model.context.current
if jobtype =3D=3D 'liftover':
job =3D sa.query( model.TransferJob ).filter_by( id=3Djobid ).=
first()
- joblabel =3D 'Download liftOver'
+ liftover =3D trans.app.job_manager.deferred_job_queue.plugins[=
'LiftOverTransferPlugin'].get_job_status( jobid )
+ joblabel =3D 'Download liftOver (%s to %s)' % ( liftover.param=
s[ 'from_genome' ], liftover.params[ 'to_genome' ] )
elif jobtype =3D=3D 'transfer':
job =3D sa.query( model.TransferJob ).filter_by( id=3Djobid ).=
first()
joblabel =3D 'Download Genome'
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -203,12 +203,12 @@
job_id=3Djob.id,
job_tool_id=3Djob.tool_id,
job_command_line=3Djob.command_line,
- job_stderr=3Djob.stderr,
- job_stdout=3Djob.stdout,
- job_info=3Djob.info,
- job_traceback=3Djob.traceback,
+ job_stderr=3Dutil.unicodify( job.stderr ),
+ job_stdout=3Dutil.unicodify( job.stdout ),
+ job_info=3Dutil.unicodify( job.info ),
+ job_traceback=3Dutil.unicodify( job.tracebac=
k ),
email=3Demail,
- message=3Dmessage )
+ message=3Dutil.unicodify( message ) )
frm =3D to_address
# Check email a bit
email =3D email.strip()
@@ -644,7 +644,10 @@
dataset =3D self.get_dataset( trans, id, False, True )
if not dataset:
web.httpexceptions.HTTPNotFound()
- return self.get_item_annotation_str( trans.sa_session, trans.user,=
dataset )
+ annotation =3D self.get_item_annotation_str( trans.sa_session, tra=
ns.user, dataset )
+ if annotation and isinstance( annotation, unicode ):
+ annotation =3D annotation.encode( 'ascii', 'replace' ) #paste =
needs ascii here
+ return annotation
=20
@web.expose
def display_at( self, trans, dataset_id, filename=3DNone, **kwd ):
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/phyloviz.py
--- /dev/null
+++ b/lib/galaxy/web/controllers/phyloviz.py
@@ -0,0 +1,97 @@
+import pkg_resources
+pkg_resources.require( "bx-python" )
+
+from galaxy.util.json import to_json_string, from_json_string
+from galaxy.web.base.controller import *
+from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_D=
ataProvider
+
+
+class PhyloVizController( BaseUIController, UsesVisualizationMixin, UsesHi=
storyDatasetAssociationMixin, SharableMixin ):
+ """
+ Controller for phyloViz browser interface.
+ """
+ def __init__(self, app ):
+ BaseUIController.__init__( self, app )
+
+ @web.expose
+ @web.require_login()
+ def index( self, trans, dataset_id =3D None, **kwargs ):
+ """
+ The index method is called using phyloviz/ with a dataset id passe=
d in.
+ The relevant data set is then retrieved via get_json_from_datasetI=
d which interfaces with the parser
+ The json representation of the phylogenetic tree along with the co=
nfig is then written in the .mako template and passed back to the user
+ """
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id)
+ config["saved_visualization"] =3D False
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D json, config=3Dconfig)
+
+
+ @web.expose
+ def visualization(self, trans, id):
+ """
+ Called using a viz_id (id) to retrieved stored visualization data =
(in json format) and all the viz_config
+ """
+ viz =3D self.get_visualization(trans, id)
+ config =3D self.get_visualization_config(trans, viz)
+ config["saved_visualization"] =3D True
+ data =3D config["root"]
+
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D data, config=3Dconfig)
+
+
+ @web.expose
+ @web.json
+ def load_visualization_json(self, trans, viz_id):
+ """
+ Though not used in current implementation, this provides user with=
a convenient method to retrieve the viz_data & viz_config via json.
+ """
+ viz =3D self.get_visualization(trans, viz_id)
+ viz_config =3D self.get_visualization_config(trans, viz)
+ viz_config["saved_visualization"] =3D True
+ return {
+ "data" : viz_config["root"],
+ "config" : viz_config
+ }
+
+
+ @web.expose
+ @web.json
+ def getJsonData(self, trans, dataset_id, treeIndex=3D0):
+ """
+ Method to retrieve data asynchronously via json format. Retriving =
from here rather than
+ making a direct datasets/ call allows for some processing and even=
t capturing
+ """
+ treeIndex =3D int(treeIndex)
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id, t=
reeIndex)
+ packedJson =3D {
+ "data" : json,
+ "config" : config
+ }
+
+ return packedJson
+
+
+ def get_json_from_datasetId(self, trans, dataset_id, treeIndex=3D0):
+ """
+ For interfacing phyloviz controllers with phyloviz visualization d=
ata provider (parsers)
+ """
+ dataset =3D self.get_dataset(trans, dataset_id)
+ fileExt, filepath =3D dataset.ext, dataset.file_name # .name=
stores the name of the dataset from the orginal upload
+ json, config =3D "", {} # config contains propertie=
s of the tree and file
+
+ if fileExt =3D=3D "json":
+ something, json =3D self.get_data(dataset)
+ else:
+ try:
+ pd =3D Phyloviz_DataProvider()
+ json, config =3D pd.parseFile(filepath, fileExt)
+ json =3D json[treeIndex]
+ except Exception:
+ pass
+
+ config["title"] =3D dataset.display_name()
+ config["ext"] =3D fileExt
+ config["dataset_id"] =3D dataset_id
+ config["treeIndex"] =3D treeIndex
+
+ return json, config
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -16,6 +16,10 @@
action =3D "paramamonster"
elif item.type =3D=3D "circster":
action =3D "circster"
+ elif item.type =3D=3D "phyloviz":
+ # Support phyloviz
+ controller =3D "phyloviz"
+ action =3D "visualization"
return dict( controller=3Dcontroller, action=3Daction, id=3Ditem.i=
d )
=20
# Grid definition
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,7 +11,7 @@
from galaxy.model.orm import *
from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for=
_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFI=
GS
from galaxy.util.shed_util import open_repository_files_folder, reversed_l=
ower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path
-from galaxy.util.shed_util import to_html_escaped, update_repository
+from galaxy.util.shed_util import to_html_escaped, update_repository, url_=
join
from galaxy.tool_shed.encoding_util import *
from common import *
=20
@@ -246,6 +246,25 @@
grids.GridAction( "User preferences", dict( controller=3D'user=
', action=3D'index', cntrller=3D'repository', webapp=3D'community' ) )
]
=20
+class WritableRepositoryListGrid( RepositoryListGrid ):
+ def build_initial_query( self, trans, **kwd ):
+ # TODO: improve performance by adding a db table associating users=
with repositories for which they have write access.
+ username =3D kwd[ 'username' ]
+ clause_list =3D []
+ for repository in trans.sa_session.query( self.model_class ):
+ allow_push_usernames =3D repository.allow_push.split( ',' )
+ if username in allow_push_usernames:
+ clause_list.append( self.model_class.table.c.id =3D=3D rep=
ository.id )
+ if clause_list:
+ return trans.sa_session.query( self.model_class ) \
+ .filter( or_( *clause_list ) ) \
+ .join( model.User.table ) \
+ .outerjoin( model.RepositoryCategoryAss=
ociation.table ) \
+ .outerjoin( model.Category.table )
+ # Return an empty query.
+ return trans.sa_session.query( self.model_class ) \
+ .filter( self.model_class.table.c.id < 0 )
+
class ValidRepositoryListGrid( RepositoryListGrid ):
class CategoryColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
@@ -393,6 +412,7 @@
email_alerts_repository_list_grid =3D EmailAlertsRepositoryListGrid()
category_list_grid =3D CategoryListGrid()
valid_category_list_grid =3D ValidCategoryListGrid()
+ writable_repository_list_grid =3D WritableRepositoryListGrid()
=20
def __add_hgweb_config_entry( self, trans, repository, repository_path=
):
# Add an entry in the hgweb.config file for a new repository. An =
entry looks something like:
@@ -519,12 +539,15 @@
repository_id =3D kwd.get( 'id', None )
repository =3D get_repository( trans, repository_id )
kwd[ 'f-email' ] =3D repository.user.email
- elif operation =3D=3D "my_repositories":
+ elif operation =3D=3D "repositories_i_own":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
if k.startswith( 'f-' ):
del kwd[ k ]
kwd[ 'f-email' ] =3D trans.user.email
+ elif operation =3D=3D "writable_repositories":
+ kwd[ 'username' ] =3D trans.user.username
+ return self.writable_repository_list_grid( trans, **kwd )
elif operation =3D=3D "repositories_by_category":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
@@ -726,9 +749,10 @@
update =3D 'true'
no_update =3D 'false'
else:
- # Start building up the url to redirect back to the calling Ga=
laxy instance.
- url =3D '%sadmin_toolshed/update_to_changeset_revision?tool_sh=
ed_url=3D%s' % ( galaxy_url, url_for( '/', qualified=3DTrue ) )
- url +=3D '&name=3D%s&owner=3D%s&changeset_revision=3D%s&latest=
_changeset_revision=3D' % ( repository.name, repository.user.username, chan=
geset_revision )
+ # Start building up the url to redirect back to the calling Ga=
laxy instance. =20
+ url =3D url_join( galaxy_url,
+ 'admin_toolshed/update_to_changeset_revision?t=
ool_shed_url=3D%s&name=3D%s&owner=3D%s&changeset_revision=3D%s&latest_chang=
eset_revision=3D' % \
+ ( url_for( '/', qualified=3DTrue ), repository=
.name, repository.user.username, changeset_revision ) )
if changeset_revision =3D=3D repository.tip:
# If changeset_revision is the repository tip, there are no ad=
ditional updates.
if from_update_manager:
@@ -1372,10 +1396,9 @@
"""Send the list of repository_ids and changeset_revisions to Gala=
xy so it can begin the installation process."""
galaxy_url =3D trans.get_cookie( name=3D'toolshedgalaxyurl' )
# Redirect back to local Galaxy to perform install.
- url =3D '%sadmin_toolshed/prepare_for_install' % galaxy_url
- url +=3D '?tool_shed_url=3D%s' % url_for( '/', qualified=3DTrue )
- url +=3D '&repository_ids=3D%s' % ','.join( util.listify( reposito=
ry_ids ) )
- url +=3D '&changeset_revisions=3D%s' % ','.join( util.listify( cha=
ngeset_revisions ) )
+ url =3D url_join( galaxy_url,
+ 'admin_toolshed/prepare_for_install?tool_shed_url=
=3D%s&repository_ids=3D%s&changeset_revisions=3D%s' % \
+ ( url_for( '/', qualified=3DTrue ), ','.join( util=
.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions )=
) ) )
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, change=
set_revision, **kwd ):
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 static/scripts/viz/phyloviz.js
--- /dev/null
+++ b/static/scripts/viz/phyloviz.js
@@ -0,0 +1,955 @@
+var UserMenuBase =3D Backbone.View.extend({
+ /**
+ * Base class of any menus that takes in user interaction. Contains ch=
ecking methods.
+ */
+
+ className: 'UserMenuBase',
+
+ isAcceptableValue : function ($inputKey, min, max) {
+ /**
+ * Check if an input value is a number and falls within max min.
+ */
+ var self =3D this,
+ value =3D $inputKey.val(),
+ fieldName =3D $inputKey.attr("displayLabel") || $inputKey.attr=
("id").replace("phyloViz", "");
+
+ function isNumeric(n) {
+ return !isNaN(parseFloat(n)) && isFinite(n);
+ }
+
+ if (!isNumeric(value)){
+ alert(fieldName + " is not a number!");
+ return false;
+ }
+
+ if ( value > max){
+ alert(fieldName + " is too large.");
+ return false;
+ } else if ( value < min) {
+ alert(fieldName + " is too small.");
+ return false;
+ }
+ return true;
+ },
+
+ hasIllegalJsonCharacters : function($inputKey) {
+ /**
+ * Check if any user string inputs has illegal characters that jso=
n cannot accept
+ */
+ if ($inputKey.val().search(/"|'|\\/) !=3D=3D -1){
+ alert("Named fields cannot contain these illegal characters: d=
ouble quote(\"), single guote(\'), or back slash(\\). ");
+ return true;
+ }
+ return false;
+ }
+});
+
+
+function PhyloTreeLayout() {
+ /**
+ * -- Custom Layout call for phyloViz to suit the needs of a phylogene=
tic tree.
+ * -- Specifically: 1) Nodes have a display display of (=3D evo dist X=
depth separation) from their parent
+ * 2) Nodes must appear in other after they have expa=
nd and contracted
+ */
+
+ var self =3D this,
+ hierarchy =3D d3.layout.hierarchy().sort(null).value(null),
+ height =3D 360, // ! represents both the layout angle and the heig=
ht of the layout, in px
+ layoutMode =3D "Linear",
+ leafHeight =3D 18, // height of each individual leaf node
+ depthSeparation =3D 200, // separation between nodes of different =
depth, in px
+ leafIndex =3D 0, // change to recurssive call
+ defaultDist =3D 0.5, // tree defaults to 0.5 dist if no dist is sp=
ecified
+ maxTextWidth =3D 50; // maximum length of the text labels
+
+
+ self.leafHeight =3D function(inputLeafHeight){
+ if (typeof inputLeafHeight =3D=3D=3D "undefined"){ return leafHeig=
ht; }
+ else { leafHeight =3D inputLeafHeight; return self;}
+ };
+
+ self.layoutMode =3D function(mode){
+ if (typeof mode =3D=3D=3D "undefined"){ return layoutMode; }
+ else { layoutMode =3D mode; return self;}
+ };
+
+ self.layoutAngle =3D function(angle) { // changes the layout angle =
of the display, which is really changing the height
+ if (typeof angle =3D=3D=3D "undefined"){ return height; }
+ if (isNaN(angle) || angle < 0 || angle > 360) { return self; } // =
to use default if the user puts in strange values
+ else { height =3D angle; return self;}
+ };
+
+ self.separation =3D function(dist){ // changes the dist between the =
nodes of different depth
+ if (typeof dist =3D=3D=3D "undefined"){ return depthSeparation; }
+ else { depthSeparation =3D dist; return self;}
+ };
+
+ self.links =3D function (nodes) { // uses d3 native method to gene=
rate links. Done.
+ return d3.layout.tree().links(nodes);
+ };
+
+ // -- Custom method for laying out phylogeny tree in a linear fashion
+ self.nodes =3D function (d, i) {
+ var _nodes =3D hierarchy.call(self, d, i), // self is to f=
ind the depth of all the nodes, assumes root is passed in
+ nodes =3D [],
+ maxDepth =3D 0,
+ numLeaves =3D 0;
+
+ // changing from hierarchy's custom format for data to usable form=
at
+ _nodes.forEach(function (_node){
+ var node =3D _node.data;
+ node.depth =3D _node.depth;
+ maxDepth =3D node.depth > maxDepth ? node.depth : maxDepth; /=
/finding max depth of tree
+ nodes.push(node);
+ });
+ // counting the number of leaf nodes and assigning max depth to no=
des that do not have children to flush all the leave nodes
+ nodes.forEach(function(node){
+ if ( !node.children ) { //&& !node._children
+ numLeaves +=3D 1;
+ node.depth =3D maxDepth; // if a leaf has no child it woul=
d be assigned max depth
+ }
+ });
+
+ leafHeight =3D layoutMode =3D=3D=3D "Circular" ? height / numLeave=
s : leafHeight;
+ leafIndex =3D 0;
+ layout(nodes[0], maxDepth, leafHeight, null);
+
+ return nodes;
+ };
+
+
+ function layout (node, maxDepth, vertSeparation, parent) {
+ /**
+ * -- Function with side effect of adding x0, y0 to all child; tak=
e in the root as starting point
+ * assuming that the leave nodes would be sorted in presented ord=
er
+ * horizontal(y0) is calculated according to (=3D evo dis=
t X depth separation) from their parent
+ * vertical (x0) - if leave node: find its order in all o=
f the leave node =3D=3D=3D node.id, then multiply by verticalSeparation
+ * - if parent node: is place in the mid point al=
l of its children nodes
+ * -- The layout will first calculate the y0 field going towards t=
he leaves, and x0 when returning
+ */
+ var children =3D node.children,
+ sumChildVertSeparation =3D 0;
+
+ // calculation of node's dist from parents, going down.
+ var dist =3D node.dist || defaultDist;
+ dist =3D dist > 1 ? 1 : dist; // We constrain all dist to be l=
ess than one
+ node.dist =3D dist;
+ if (parent !=3D=3D null){
+ node.y0 =3D parent.y0 + dist * depthSeparation;
+ } else { //root node
+ node.y0 =3D maxTextWidth;
+ }
+
+
+ // if a node have no children, we will treat it as a leaf and star=
t laying it out first
+ if (!children) {
+ node.x0 =3D leafIndex++ * vertSeparation;
+ } else {
+ // if it has children, we will visit all its children and calc=
ulate its position from its children
+ children.forEach( function (child) {
+ child.parent =3D node;
+ sumChildVertSeparation +=3D layout(child, maxDepth, vertSe=
paration, node);
+ });
+ node.x0 =3D sumChildVertSeparation / children.length;
+ }
+
+ // adding properties to the newly created node
+ node.x =3D node.x0;
+ node.y =3D node.y0;
+ return node.x0;
+ }
+ return self;
+}
+
+
+/**
+ * -- PhyloTree Model --
+ */
+var PhyloTree =3D Visualization.extend({
+ defaults : {
+ layout: "Linear",
+ separation : 250, // px dist between nodes of different depth t=
o represent 1 evolutionary until
+ leafHeight: 18,
+ type : "phyloviz", // visualization type
+ title : "Title",
+ scaleFactor: 1,
+ translate: [0,0],
+ fontSize: 12, //fontSize of node label
+ selectedNode : null,
+ nodeAttrChangedTime : 0
+ },
+
+ root : {}, // Root has to be its own independent object because it is =
not part of the viz_config
+
+ toggle : function (d) {
+ /**
+ * Mechanism to expand or contract a single node. Expanded nodes h=
ave a children list, while for
+ * contracted nodes the list is stored in _children. Nodes with th=
eir children data stored in _children will not have their
+ * children rendered.
+ */
+ if(typeof d =3D=3D=3D "undefined") {return ;}
+ if (d.children ) {
+ d._children =3D d.children;
+ d.children =3D null;
+ } else {
+ d.children =3D d._children;
+ d._children =3D null;
+ }
+ },
+
+ toggleAll : function(d) {
+ /**
+ * Contracts the phylotree to a single node by repeatedly calling=
itself to place all the list
+ * of children under _children.
+ */
+ if (d.children && d.children.length !=3D=3D 0) {
+ d.children.forEach(this.toggleAll);
+ toggle(d);
+ }
+ },
+
+ getData : function (){
+ /**
+ * Return the data of the tree. Used for preserving state.
+ */
+ return this.root;
+ },
+
+ save: function() {
+ /**
+ * Overriding the default save mechanism to do some clean of circu=
lar reference of the
+ * phyloTree and to include phyloTree in the saved json
+ */
+ var root =3D this.root;
+ cleanTree(root);
+ this.set("root", root);
+
+ function cleanTree(node){
+ // we need to remove parent to delete circular reference
+ delete node.parent;
+
+ // removing unnecessary attributes
+ if (node._selected){ delete node._selected;}
+
+ node.children ? node.children.forEach(cleanTree) : 0;
+ node._children ? node._children.forEach(cleanTree) : 0;
+ }
+
+ var config =3D jQuery.extend(true, {}, this.attributes);
+ config["selectedNode"] =3D null;
+
+ show_message("Saving to Galaxy", "progress");
+
+ return $.ajax({
+ url: this.url(),
+ type: "POST",
+ dataType: "json",
+ data: {
+ vis_json: JSON.stringify(config)
+ },
+ success: function(res){
+ var viz_id =3D res.url.split("id=3D")[1].split("&")[0],
+ viz_url =3D "/phyloviz/visualization?id=3D" + viz_id;
+ window.history.pushState({}, "", viz_url + window.location=
.hash);
+ hide_modal();
+ }
+ });
+ }
+});
+
+
+
+/**
+ * -- Views --
+ */
+var PhylovizLayoutBase =3D Backbone.View.extend({
+ /**
+ * Stores the default variable for setting up the visualization
+ */
+ defaults : {
+ nodeRadius : 4.5 // radius of each node in the diagram
+ },
+
+
+ stdInit : function (options) {
+ /**
+ * Common initialization in layouts
+ */
+
+ var self =3D this;
+ self.model.on("change:separation change:leafHeight change:fontSize=
change:nodeAttrChangedTime", self.updateAndRender, self);
+
+ self.vis =3D options.vis;
+ self.i =3D 0;
+ self.maxDepth =3D -1; // stores the max depth of the tree
+
+ self.width =3D options.width;
+ self.height =3D options.height;
+ },
+
+
+ updateAndRender : function(source) {
+ /**
+ * Updates the visualization whenever there are changes in the ex=
pansion and contraction of nodes
+ * AND possibly when the tree is edited.
+ */
+ var vis =3D d3.select(".vis"),
+ self =3D this;
+ source =3D source || self.model.root;
+
+ self.renderNodes(source);
+ self.renderLinks(source);
+ self.addTooltips();
+ },
+
+
+ renderLinks : function(source) {
+ /**
+ * Renders the links for the visualization.
+ */
+ var self =3D this;
+ var diagonal =3D self.diagonal;
+ var duration =3D self.duration;
+ var layoutMode =3D self.layoutMode;
+ var link =3D self.vis.selectAll("g.completeLink")
+ .data(self.tree.links(self.nodes), function(d) { return d.targ=
et.id; });
+
+ var calcalateLinePos =3D function(d) {
+ d.pos0 =3D d.source.y0 + " " + d.source.x0; // position of t=
he source node <=3D> starting location of the line drawn
+ d.pos1 =3D d.source.y0 + " " + d.target.x0; // position where=
the line makes a right angle bend
+ d.pos2 =3D d.target.y0 + " " + d.target.x0; // point where=
the horizontal line becomes a dotted line
+ };
+
+ var linkEnter =3D link.enter().insert("svg:g","g.node")
+ .attr("class", "completeLink");
+
+
+ linkEnter.append("svg:path")
+ .attr("class", "link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1;
+ });
+
+ var linkUpdate =3D link.transition().duration(500);
+
+ linkUpdate.select("path.link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1 + " L " + d.pos2;
+ });
+
+ var linkExit =3D link.exit().remove();
+
+ },
+
+ // User Interaction methods below
+
+ selectNode : function(node){
+ /**
+ * Displays the information for editting
+ */
+ var self =3D this;
+ d3.selectAll("g.node")
+ .classed("selectedHighlight", function(d){
+ if (node.id =3D=3D=3D d.id){
+ if(node._selected) { // for de=3Dselecting node.
+ delete node._selected;
+ return false;
+ } else {
+ node._selected =3D true;
+ return true;
+ }
+ }
+ return false;
+ });
+
+ self.model.set("selectedNode", node);
+ $("#phyloVizSelectedNodeName").val(node.name);
+ $("#phyloVizSelectedNodeDist").val(node.dist);
+ $("#phyloVizSelectedNodeAnnotation").val(node.annotation || "");
+ },
+
+ addTooltips : function (){
+ /**
+ * Creates bootstrap tooltip for the visualization. Has to be cal=
led repeatedly due to newly generated
+ * enterNodes
+ */
+ $(".bs-tooltip").remove(); //clean up tooltip, just in case i=
ts listeners are removed by d3
+ $(".node")
+ .attr("data-original-title", function(){
+ var d =3D this.__data__,
+ annotation =3D d.annotation || "None" ;
+ return d ? (d.name ? d.name + "<br/>" : "") + "Dist: " + d=
.dist + " <br/>Annotation: " + annotation: "";
+ })
+ .tooltip({'placement':'top', 'trigger' : 'hover'});
+
+ }
+});
+
+
+
+
+var PhylovizLinearView =3D PhylovizLayoutBase.extend({
+ /**
+ * Linea layout class of Phyloviz, is responsible for rendering the no=
des
+ * calls PhyloTreeLayout to determine the positions of the nodes
+ */
+ initialize : function(options){
+ // Default values of linear layout
+ var self =3D this;
+ self.margins =3D options.margins;
+ self.layoutMode =3D "Linear";
+
+ self.stdInit(options);
+
+ self.layout();
+ self.updateAndRender(self.model.root);
+ },
+
+ layout : function() {
+ /**
+ * Creates the basic layout of a linear tree by precalculating fix=
ed values.
+ * One of calculations are also made here
+ */
+
+ var self =3D this;
+
+ self.tree =3D new PhyloTreeLayout().layoutMode("Linear");
+ self.diagonal =3D d3.svg.diagonal()
+ .projection(function(d) { return [d.y, d.x ]; });
+ },
+
+ renderNodes : function (source) {
+ /**
+ * Renders the nodes base on Linear layout.
+ */
+ var self =3D this,
+ fontSize =3D self.model.get("fontSize") + "px";
+
+ // assigning properties from models
+ self.tree.separation(self.model.get("separation")).leafHeight(self=
.model.get("leafHeight"));
+
+ var duration =3D 500,
+ nodes =3D self.tree.separation(self.model.get("separation")).n=
odes(self.model.root);
+
+ var node =3D self.vis.selectAll("g.node")
+ .data(nodes, function(d) { return d.name + d.id || (d.id =3D +=
+self.i); });
+
+ // These variables has to be passed into update links which are in=
the base methods
+ self.nodes =3D nodes;
+ self.duration =3D duration;
+
+ // ------- D3 ENTRY --------
+ // Enter any new nodes at the parent's previous position.
+ var nodeEnter =3D node.enter().append("svg:g")
+ .attr("class", "node")
+ .on("dblclick", function(){ d3.event.stopPropagation(); })
+ .on("click", function(d) {
+ if (d3.event.altKey) {
+ self.selectNode(d); // display info if alt is p=
ressed
+ } else {
+ if(d.children && d.children.length =3D=3D=3D 0){ retur=
n;} // there is no need to toggle leaves
+ self.model.toggle(d); // contract/expand nodes at da=
ta level
+ self.updateAndRender(d); // re-render the tree
+ }
+ });
+
+ nodeEnter.attr("transform", function(d) { return "translate(" + so=
urce.y0 + "," + source.x0 + ")"; });
+
+ nodeEnter.append("svg:circle")
+ .attr("r", 1e-6)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeEnter.append("svg:text")
+ .attr("class", "nodeLabel")
+ .attr("x", function(d) { return d.children || d._children ? -1=
0 : 10; })
+ .attr("dy", ".35em")
+ .attr("text-anchor", function(d) { return d.children || d._chi=
ldren ? "end" : "start"; })
+ .style("fill-opacity", 1e-6);
+
+ // ------- D3 TRANSITION --------
+ // Transition nodes to their new position.
+ var nodeUpdate =3D node.transition()
+ .duration(duration);
+
+ nodeUpdate.attr("transform", function(d) {
+ return "translate(" + d.y + "," + d.x + ")"; });
+
+ nodeUpdate.select("circle")
+ .attr("r", self.defaults.nodeRadius)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeUpdate.select("text")
+ .style("fill-opacity", 1)
+ .style("font-size", fontSize)
+ .text(function(d) { return d.name; });
+
+ // ------- D3 EXIT --------
+ // Transition exiting nodes to the parent's new position.
+ var nodeExit =3Dnode.exit().transition()
+ .duration(duration)
+ .remove();
+
+ nodeExit.select("circle")
+ .attr("r", 1e-6);
+
+ nodeExit.select("text")
+ .style("fill-opacity", 1e-6);
+
+ // Stash the old positions for transition.
+ nodes.forEach(function(d) {
+ d.x0 =3D d.x; // we need the x0, y0 for parents with children
+ d.y0 =3D d.y;
+ });
+ }
+
+});
+
+var PhylovizView =3D Backbone.View.extend({
+
+ className: 'phyloviz',
+
+ initialize: function(options) {
+ var self =3D this;
+ // -- Default values of the vis
+ self.MIN_SCALE =3D 0.05; //for zooming
+ self.MAX_SCALE =3D 5;
+ self.MAX_DISPLACEMENT =3D 500;
+ self.margins =3D [10, 60, 10, 80];
+
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.radius =3D self.width;
+ self.data =3D options.data;
+
+ // -- Events Phyloviz view responses to
+ $(window).resize(function(){
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.render();
+ });
+
+ // -- Create phyloTree model
+ self.phyloTree =3D new PhyloTree(options.config);
+ self.phyloTree.root =3D self.data;
+
+ // -- Set up UI functions of main view
+ self.zoomFunc =3D d3.behavior.zoom().scaleExtent([self.MIN_SCALE, =
self.MAX_SCALE]);
+ self.zoomFunc.translate(self.phyloTree.get("translate"));
+ self.zoomFunc.scale(self.phyloTree.get("scaleFactor"));
+
+ // -- set up header buttons, search and settings menu
+ self.navMenu =3D new HeaderButtons(self);
+ self.settingsMenu =3D new SettingsMenu({phyloTree : self.phyloTree=
});
+ self.nodeSelectionView =3D new NodeSelectionView({phyloTree : self=
.phyloTree});
+ self.search =3D new PhyloVizSearch();
+
+
+ setTimeout(function(){ // using settimeout to call the zoomAn=
dPan function according to the stored attributes in viz_config
+ self.zoomAndPan();
+ }, 1000);
+ },
+
+ render: function(){
+ // -- Creating helper function for vis. --
+ var self =3D this;
+ $("#PhyloViz").empty();
+
+ // -- Layout viz. --
+ self.mainSVG =3D d3.select("#PhyloViz").append("svg:svg")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("pointer-events", "all")
+ .call(self.zoomFunc.on("zoom", function(){
+ self.zoomAndPan();
+ }));
+
+ self.boundingRect =3D self.mainSVG.append("svg:rect")
+ .attr("class", "boundingRect")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("stroke", "black")
+ .attr("fill", "white");
+
+ self.vis =3D self.mainSVG
+ .append("svg:g")
+ .attr("class", "vis");
+
+ self.layoutOptions =3D {
+ model : self.phyloTree,
+ width : self.width,
+ height : self.height,
+ vis: self.vis,
+ margins: self.margins
+ };
+
+ // -- Creating Title
+ $("#title").text("Phylogenetic Tree from " + self.phyloTree.get("t=
itle") + ":");
+
+ // -- Create Linear view instance --
+ var linearView =3D new PhylovizLinearView(self.layoutOptions)
+ },
+
+ zoomAndPan : function(event){
+ /**
+ * Function to zoom and pan the svg element which the entire tree =
is contained within
+ * Uses d3.zoom events, and extend them to allow manual updates an=
d keeping states in model
+ */
+ if (typeof event !=3D=3D "undefined") {
+ var zoomParams =3D event.zoom,
+ translateParams =3D event.translate;
+ }
+
+ var self =3D this,
+ scaleFactor =3D self.zoomFunc.scale(),
+ translationCoor =3D self.zoomFunc.translate(),
+ zoomStatement =3D "",
+ translateStatement =3D "";
+
+ // Do manual scaling.
+ switch (zoomParams) {
+ case "reset":
+ scaleFactor =3D 1.0;
+ translationCoor =3D [0,0]; break;
+ case "+":
+ scaleFactor *=3D 1.1; break;
+ case "-":
+ scaleFactor *=3D 0.9; break;
+ default:
+ if (typeof zoomParams =3D=3D=3D "number") {
+ scaleFactor =3D zoomParams;
+ } else if (d3.event !=3D=3D null) {
+ scaleFactor =3D d3.event.scale;
+ }
+ }
+ if (scaleFactor < self.MIN_SCALE || scaleFactor > self.MAX_SCALE) =
{ return;}
+ self.zoomFunc.scale(scaleFactor); //update scale Factor
+ zoomStatement =3D "translate(" + self.margins[3] + "," + self.mar=
gins[0] + ")" +
+ " scale(" + scaleFactor + ")";
+
+ // Do manual translation.
+ if( d3.event !=3D=3D null) {
+ translateStatement =3D "translate(" + d3.event.translate + ")";
+ } else {
+ if(typeof translateParams !=3D=3D "undefined") {
+ var x =3D translateParams.split(",")[0];
+ var y =3D translateParams.split(",")[1];
+ if (!isNaN(x) && !isNaN(y)){
+ translationCoor =3D [translationCoor[0] + parseFloat(x=
), translationCoor[1] + parseFloat(y)];
+ }
+ }
+ self.zoomFunc.translate(translationCoor); // update zoomFunc
+ translateStatement =3D "translate(" + translationCoor + ")";
+ }
+
+ self.phyloTree.set("scaleFactor", scaleFactor);
+ self.phyloTree.set("translate", translationCoor);
+ self.vis.attr("transform", translateStatement + zoomStatement); //=
refers to the view that we are actually zooming
+ },
+
+
+ reloadViz : function() {
+ /**
+ * Primes the Ajax URL to load another Nexus tree
+ */
+ var self =3D this,
+ treeIndex =3D $("#phylovizNexSelector :selected").val(),
+ dataset_id =3D self.phyloTree.get("dataset_id"),
+ url =3D "phyloviz/getJsonData?dataset_id=3D" + dataset_id + "&=
treeIndex=3D" + String(treeIndex);
+ $.getJSON(url, function(packedJson){
+ window.initPhyloViz(packedJson.data, packedJson.config);
+ });
+ }
+});
+
+
+var HeaderButtons =3D Backbone.View.extend({
+
+ initialize : function(phylovizView){
+ var self =3D this;
+ self.phylovizView =3D phylovizView;
+
+ // Clean up code - if the class initialized more than once
+ $("#panelHeaderRightBtns").empty();
+ $("#phyloVizNavBtns").empty();
+ $("#phylovizNexSelector").off();
+
+ self.initNavBtns();
+ self.initRightHeaderBtns();
+
+ // Initial a tree selector in the case of nexus
+ $("#phylovizNexSelector").off().on("change", function() {self.phy=
lovizView.reloadViz();} );
+
+ },
+
+ initRightHeaderBtns : function(){
+ var self =3D this;
+
+ rightMenu =3D create_icon_buttons_menu([
+ { icon_class: 'gear', title: 'PhyloViz Settings', on_click: fu=
nction(){
+ $("#SettingsMenu").show();
+ self.settingsMenu.updateUI();
+ } },
+ { icon_class: 'disk', title: 'Save visualization', on_click: f=
unction() {
+ var nexSelected =3D $("#phylovizNexSelector option:selecte=
d").text();
+ if(nexSelected) {
+ self.phylovizView.phyloTree.set("title", nexSelected);
+ }
+ self.phylovizView.phyloTree.save();
+ } },
+ { icon_class: 'chevron-expand', title: 'Search / Edit Nodes', =
on_click: function() {
+ $("#nodeSelectionView").show();
+ } },
+ { icon_class: 'information', title: 'Phyloviz Help', on_click:=
function() {
+ window.open('http://wiki.g2.bx.psu.edu/Learn/Visualization=
/PhylogeneticTree');
+ // https://docs.google.com/document/d/1AXFoJgEpxr21H3LICRs=
3EyMe1B1X_KFPouzIgrCz3zk/edit
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#panelHeaderRightBtns").append(rightMenu.$el);
+ },
+
+ initNavBtns: function() {
+ var self =3D this,
+ navMenu =3D create_icon_buttons_menu([
+ { icon_class: 'zoom-in', title: 'Zoom in', on_click: funct=
ion() {
+ self.phylovizView.zoomAndPan({ zoom : "+"});
+ } },
+ { icon_class: 'zoom-out', title: 'Zoom out', on_click: fun=
ction() {
+ self.phylovizView.zoomAndPan({ zoom : "-"});
+ } },
+ { icon_class: 'arrow-circle', title: 'Reset Zoom/Pan', on_=
click: function() {
+ self.phylovizView.zoomAndPan({ zoom : "reset"});
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#phyloVizNavBtns").append(navMenu.$el);
+ }
+});
+
+
+var SettingsMenu =3D UserMenuBase.extend({
+
+ className: 'Settings',
+
+ initialize: function(options){
+ // settings needs to directly interact with the phyloviz model so =
it will get access to it.
+ var self =3D this;
+ self.phyloTree =3D options.phyloTree;
+ self.el =3D $("#SettingsMenu");
+ self.inputs =3D {
+ separation : $("#phyloVizTreeSeparation"),
+ leafHeight : $("#phyloVizTreeLeafHeight"),
+ fontSize : $("#phyloVizTreeFontSize")
+ };
+
+ //init all buttons of settings
+ $("#settingsCloseBtn").off().on("click", function() { self.el.hide=
(); });
+ $("#phylovizResetSettingsBtn").off().on("click", function() { self=
.resetToDefaults(); });
+ $("#phylovizApplySettingsBtn").off().on("click", function() { self=
.apply(); });
+ },
+
+ apply : function(){
+ /**
+ * Applying user values to phylotree model.
+ */
+ var self =3D this;
+ if (!self.isAcceptableValue(self.inputs["separation"], 50, 2500) ||
+ !self.isAcceptableValue(self.inputs["leafHeight"], 5, 30) ||
+ !self.isAcceptableValue(self.inputs["fontSize"], 5, 20)){
+ return;
+ }
+ $.each(self.inputs, function(key, $input){
+ self.phyloTree.set(key, $input.val());
+ });
+ },
+ updateUI : function(){
+ /**
+ * Called to update the values input to that stored in the model
+ */
+ var self =3D this;
+ $.each(self.inputs, function(key, $input){
+ $input.val(self.phyloTree.get(key));
+ });
+ },
+ resetToDefaults : function(){
+ /**
+ * Resets the value of the phyloTree model to its default
+ */
+ $(".bs-tooltip").remove(); // just in case the tool tip was n=
ot removed
+ var self =3D this;
+ $.each(self.phyloTree.defaults, function(key, value) {
+ self.phyloTree.set(key, value);
+ });
+ self.updateUI();
+ },
+
+ render: function(){
+
+ }
+
+});
+
+
+var NodeSelectionView =3D UserMenuBase.extend({
+ /**
+ * View for inspecting node properties and editing them
+ */
+ className: 'Settings',
+
+ initialize : function (options){
+ var self =3D this;
+ self.el =3D $("#nodeSelectionView");
+ self.phyloTree =3D options.phyloTree;
+
+ self.UI =3D {
+ enableEdit : $('#phylovizEditNodesCheck'),
+ saveChanges : $('#phylovizNodeSaveChanges'),
+ cancelChanges : $("#phylovizNodeCancelChanges"),
+ name : $("#phyloVizSelectedNodeName"),
+ dist : $("#phyloVizSelectedNodeDist"),
+ annotation : $("#phyloVizSelectedNodeAnnotation")
+ };
+
+ self.valuesOfConcern =3D {
+ name : null,
+ dist : null,
+ annotation : null
+ }; // temporarily stores the values in case user change their mind
+
+ //init UI buttons
+ $("#nodeSelCloseBtn").off().on("click", function() { self.el.hide(=
); });
+ self.UI.saveChanges.off().on("click", function(){ self.updateNodes=
(); });
+ self.UI.cancelChanges.off().on("click", function(){ self.cancelCha=
nges(); });
+
+ (function ($) {
+ // extending jquery fxn for enabling and disabling nodes.
+ $.fn.enable =3D function (isEnabled) {
+ return $(this).each(function () {
+ if(isEnabled){
+ $(this).removeAttr('disabled');
+ } else {
+ $(this).attr('disabled', 'disabled');
+ }
+ });
+ };
+ })(jQuery);
+
+ self.UI.enableEdit.off().on("click", function () {
+ self.toggleUI();
+ });
+ },
+
+ toggleUI : function(){
+ /**
+ * For turning on and off the child elements
+ */
+ var self =3D this,
+ checked =3D self.UI.enableEdit.is(':checked');
+
+ !checked ? self.cancelChanges() : "";
+
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].enable(checked);
+ });
+ if(checked){
+ self.UI.saveChanges.show();
+ self.UI.cancelChanges.show();
+ } else {
+ self.UI.saveChanges.hide();
+ self.UI.cancelChanges.hide();
+ }
+
+ },
+
+ cancelChanges : function() {
+ /**
+ * Reverting to previous values in case user change their minds
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].val(node[key]);
+ });
+ }
+ },
+
+ updateNodes : function (){
+ /**
+ * Changing the data in the underlying tree with user-specified va=
lues
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ if (!self.isAcceptableValue(self.UI.dist, 0, 1) ||
+ self.hasIllegalJsonCharacters(self.UI.name) ||
+ self.hasIllegalJsonCharacters(self.UI.annotation) ) {
+ return;
+ }
+ $.each(self.valuesOfConcern, function(key, value) {
+ (node[key]) =3D self.UI[key].val();
+ });
+ self.phyloTree.set("nodeAttrChangedTime", new Date());
+ } else {
+ alert("No node selected");
+ }
+ }
+
+
+});
+
+
+
+var PhyloVizSearch =3D UserMenuBase.extend({
+ /**
+ * Initializes the search panel on phyloviz and handles its user inter=
action
+ * It allows user to search the entire free based on some qualifer, li=
ke dist <=3D val.
+ */
+ initialize : function () {
+ var self =3D this;
+
+ $("#phyloVizSearchBtn").on("click", function(){
+ var searchTerm =3D $("#phyloVizSearchTerm"),
+ searchConditionVal =3D $("#phyloVizSearchCondition").val()=
.split("-"),
+ attr =3D searchConditionVal[0],
+ condition =3D searchConditionVal[1];
+ self.hasIllegalJsonCharacters(searchTerm);
+
+ if (attr =3D=3D=3D "dist"){
+ self.isAcceptableValue(searchTerm, 0, 1);
+ }
+ self.searchTree(attr, condition, searchTerm.val());
+ });
+ },
+
+ searchTree : function (attr, condition, val){
+ /**
+ * Searches the entire tree and will highlight the nodes that matc=
h the condition in green
+ */
+ d3.selectAll("g.node")
+ .classed("searchHighlight", function(d){
+ var attrVal =3D d[attr];
+ if (typeof attrVal !=3D=3D "undefined" && attrVal !=3D=3D =
null){
+ if (attr =3D=3D=3D "dist"){
+ switch (condition) {
+ case "greaterEqual":
+ return attrVal >=3D +val;
+ case "lesserEqual":
+ return attrVal <=3D +val;
+ default:
+ return;
+ }
+
+ } else if (attr =3D=3D=3D "name" || attr =3D=3D=3D "an=
notation") {
+ return attrVal.toLowerCase().indexOf(val.toLowerCa=
se()) !=3D=3D -1;
+ }
+ }
+ });
+ }
+});
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako
+++ b/templates/dataset/edit_attributes.mako
@@ -58,7 +58,7 @@
Info:
</label><div style=3D"float: left; width: 250px; margin-ri=
ght: 10px;">
- <textarea name=3D"info" cols=3D"40" rows=3D"2">${data.=
info | h}</textarea>
+ <textarea name=3D"info" cols=3D"40" rows=3D"2">${ util=
.unicodify( data.info ) | h}</textarea></div><div style=3D"clear: both"></d=
iv></div>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/dataset/errors.mako
--- a/templates/dataset/errors.mako
+++ b/templates/dataset/errors.mako
@@ -24,21 +24,21 @@
<% job =3D hda.creating_job_associations[0].job %>
%if job.traceback:
The Galaxy framework encountered the following error while=
attempting to run the tool:
- <pre>${job.traceback | h}</pre>
+ <pre>${ util.unicodify( job.traceback ) | h}</pre>
%endif
%if job.stderr or job.info:
Tool execution generated the following error message:
%if job.stderr:
- <pre>${job.stderr | h}</pre>
+ <pre>${ util.unicodify( job.stderr ) | h}</pre>
%elif job.info:
- <pre>${job.info | h}</pre>
+ <pre>${ util.unicodify( job.info ) | h}</pre>
%endif
%else:
Tool execution did not generate any error messages.
%endif
%if job.stdout:
The tool produced the following additional output:
- <pre>${job.stdout | h}</pre>
+ <pre>${ util.unicodify( job.stdout ) | h}</pre>
%endif
%else:
The tool did not create any additional job / error info.
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -272,6 +272,17 @@
}
=20
init_trackster_links();
+
+ function init_phyloviz_links() {
+ // PhyloViz links
+ // Add to trackster browser functionality
+ $(".phyloviz-add").live("click", function() {
+ var dataset =3D this,
+ dataset_jquery =3D $(this);
+ window.parent.location =3D dataset_jquery.attr("new-url");
+ });
+ }
+ init_phyloviz_links();
=20
// History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for=
( controller=3D"/history", action=3D"rename_async", id=3Dtrans.security.enc=
ode_id(history.id) )}", "new_name", 18);
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -29,6 +29,9 @@
## Render the dataset `data` as history item, using `hid` as the displayed=
id
<%def name=3D"render_dataset( data, hid, show_deleted_on_refresh =3D False=
, for_editing =3D True, display_structured =3D False )"><%
+
+ from galaxy.datatypes.xml import Phyloxml
+ from galaxy.datatypes.data import Newick, Nexus
dataset_id =3D trans.security.encode_id( data.id )
=20
if data.state in ['no state','',None]:
@@ -230,6 +233,14 @@
action-url=3D"${h.url_for( controller=3D't=
racks', action=3D'browser', dataset_id=3Ddataset_id)}"
new-url=3D"${h.url_for( controller=3D'trac=
ks', action=3D'index', dataset_id=3Ddataset_id, default_dbkey=3Ddata.dbkey)=
}" title=3D"View in Trackster"></a>
%endif
+ <%
+ isPhylogenyData =3D isinstance(data.datatype, =
(Phyloxml, Nexus, Newick))
+ %>
+ %if isPhylogenyData:
+ <a href=3D"javascript:void(0)" class=3D"i=
con-button chart_curve phyloviz-add"
+ action-url=3D"${h.url_for( controller=
=3D'phyloviz', action=3D'-', dataset_id=3Ddataset_id)}"
+ new-url=3D"${h.url_for( controller=3D'p=
hyloviz', action=3D'index', dataset_id=3Ddataset_id)}" title=3D"View in Phy=
loviz"></a>
+ %endif
%if trans.user:
%if not display_structured:
<div style=3D"float: right">
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/visualization/phyloviz.mako
--- /dev/null
+++ b/templates/visualization/phyloviz.mako
@@ -0,0 +1,320 @@
+<%inherit file=3D"/webapps/galaxy/base_panels.mako"/>
+##
+<%def name=3D"init()">
+ <%
+ self.has_left_panel=3DFalse
+ self.has_right_panel=3DFalse
+ self.active_view=3D"visualization"
+ self.message_box_visible=3DFalse
+ %>
+</%def>
+
+<%def name=3D"stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+
+ .node circle {
+ cursor: pointer;
+ fill: #fff;
+ stroke: steelblue;
+ stroke-width: 1.5px;
+ }
+
+ .node.searchHighlight circle {
+ stroke-width: 3px;
+ stroke: #7adc26;
+ }
+
+ .node.selectedHighlight circle {
+ stroke-width: 3px;
+ stroke: #dc143c;
+ }
+
+ path.link {
+ fill: none;
+ stroke: #B5BBFF;
+ stroke-width: 4.0px;
+ }
+
+
+ div #phyloVizNavContainer{
+ text-align: center;
+ width: 100%;
+ height: 0px;
+ }
+
+ div #phyloVizNav{
+ font-weight: bold;
+ display: inline-block;
+ background: transparent;
+ top: -2em;
+ position: relative;
+ }
+
+ div .navControl{
+ float: left;
+ }
+
+ div#FloatingMenu {
+ left: 0;
+ top: 15%;
+ width:20%;
+ z-index:100;
+ padding: 5px;
+
+ }
+
+ div#SettingsMenu {
+ width: 25%;
+ top: 350px;
+
+ }
+
+ div#nodeSelectionView {
+ width: 25%;
+ top:70px;
+ }
+
+ .Panel {
+ right: 0%;
+ z-index: 101;
+ position: fixed;
+
+ ## Borrowed from galaxy modal_dialogues
+ background-color: white;
+ border: 1px solid #999;
+ border: 1px solid rgba(0, 0, 0, 0.3);
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ }
+
+ span.PhylovizCloseBtn{
+ cursor: pointer;
+ float : right;
+ }
+
+ #PhyloViz{
+ width: 100%;
+ height: 95%;
+ }
+
+ h2.PhyloVizMenuTitle{
+ color: white;
+ }
+
+ ## Settings Menu
+ .SettingMenuRows{
+ margin: 2px 0 2px 0;
+ }
+
+
+ ## Helper Styles
+ .PhyloVizFloatLeft{
+ float : left;
+ }
+ .icon-button.zoom-in,.icon-button.zoom-out{display:inline-block;he=
ight:16px;width:16px;margin-bottom:-3px;cursor:pointer;}
+ .icon-button.zoom-out{background:transparent url(../images/fugue/m=
agnifier-zoom-out.png) center center no-repeat;}
+ .icon-button.zoom-in{margin-left:10px;background:transparent url(.=
./images/fugue/magnifier-zoom.png) center center no-repeat;}
+
+ </style>
+</%def>
+
+
+<%def name=3D"javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "galaxy.panels", "libs/d3", "mvc/data", "viz/visualization", "=
viz/phyloviz")}
+</%def>
+
+
+
+<%def name=3D"center_panel()">
+
+ <div class=3D"unified-panel-header" unselectable=3D"on">
+ <div class=3D"unified-panel-header-inner">
+ <div style=3D"float:left;" id=3D"title"></div>
+ <div style=3D"float:right;" id=3D"panelHeaderRightBtns"></div>
+ </div>
+ <div style=3D"clear: both"></div>
+ </div>
+
+
+ <div id=3D"phyloVizNavContainer">
+ <div id=3D"phyloVizNav">
+ %if config["ext"] =3D=3D "nex" and not config["saved_visualiza=
tion"]:
+ <div id =3D "phylovizNexInfo" class=3D"navControl">
+ <p>Select a tree to view:
+ <select id=3D"phylovizNexSelector">
+ % for tree, index in config["trees"]:
+ <option value=3D"${index}">${tree}</option>
+ % endfor
+ </select>
+ </p>
+ </div>
+ %endif
+ <div id=3D"phyloVizNavBtns" class=3D"navControl">
+ </div>
+ <div class=3D"navControl">
+ <p> | Alt+click to select nodes</p>
+ </div>
+
+
+ </div>
+
+ </div>
+
+ ## Node Selection Menu
+ <div id=3D"nodeSelectionView" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Search / Edit Nodes :
+ <span class=3D"PhylovizCloseBtn" id=3D"nodeSelCloseBtn"> X=
</span>
+ </h3>
+ </div>
+
+ <div class=3D"modal-body">
+
+ <div class=3D"SettingMenuRows">
+ Search for nodes with:
+ <select id=3D"phyloVizSearchCondition" style=3D"width: 55%=
">
+ <option value=3D"name-containing">Name (containing)</o=
ption>
+ <option value=3D"annotation-containing">Annotation (co=
ntaining)</option>
+ <option value=3D"dist-greaterEqual">Distance (>=3D)</o=
ption>
+ <option value=3D"dist-lesserEqual">Distance (<=3D)</op=
tion>
+ </select>
+ <input type=3D"text" id=3D"phyloVizSearchTerm" value=3D"N=
one" size=3D"15" displayLabel=3D"Distance">
+
+ <div class=3D"SettingMenuRows" style=3D"text-align: center=
;">
+ <button id=3D"phyloVizSearchBtn" > Search! </button>
+ </div>
+ </div>
+
+ <br/>
+
+ <div class=3D"SettingMenuRows">
+ Name: <input type=3D"text" id=3D"phyloVizSelectedNodeName"=
value=3D"None" size=3D"15" disabled=3D"disabled" >
+ </div>
+ <div class=3D"SettingMenuRows">
+ Dist: <input type=3D"text" id=3D"phyloVizSelectedNodeDist"=
value=3D"None" size=3D"15" disabled=3D"disabled" displayLabel=3D"Distance">
+ </div>
+ <div class=3D"SettingMenuRows">
+ Annotation:
+ <textarea id=3D"phyloVizSelectedNodeAnnotation" disabled=
=3D"disabled" ></textarea>
+ </div>
+ <div class=3D"SettingMenuRows">
+ Edit: <input type=3D"checkbox" id=3D"phylovizEditNodesChec=
k" value=3D"You can put custom annotations here and it will be saved">
+ <button id=3D"phylovizNodeSaveChanges" style=3D"display: n=
one;"> Save edits</button>
+ <button id=3D"phylovizNodeCancelChanges" style=3D"display:=
none;"> Cancel</button>
+ </div>
+ </div>
+ </div>
+
+ ## Settings Menus
+ <div id=3D"SettingsMenu" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Phyloviz Settings:
+ <span class=3D"PhylovizCloseBtn" id=3D"settingsCloseBtn"> =
X </span>
+ </h3>
+ </div>
+ <div class=3D"modal-body">
+ <div class=3D"SettingMenuRows">
+ Phylogenetic Spacing (px per unit): <input id=3D"phyloVizT=
reeSeparation" type=3D"text" value=3D"250" size=3D"10" displayLabel=3D"Phyl=
ogenetic Separation"> (50-2500)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Vertical Spacing (px): <input type=3D"text" id=3D"phyloViz=
TreeLeafHeight" value=3D"18" size=3D"10" displayLabel=3D"Vertical Spacing">=
(5-30)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Font Size (px): <input type=3D"text" id=3D"phyloVizTreeFon=
tSize" value=3D"12" size=3D"4" displayLabel=3D"Font Size"> (5-20)
+ </div>
+
+ </div>
+ <div class=3D"modal-footer">
+ <button id=3D"phylovizResetSettingsBtn" class=3D"PhyloVizFloat=
Left" > Reset </button>
+ <button id=3D"phylovizApplySettingsBtn" class=3D"PhyloVizFloat=
Right" > Apply </button>
+ </div>
+ </div>
+
+
+
+
+
+
+ <div class=3D"Panel" id=3D"FloatingMenu" style=3D"display: None;">
+
+ <h2>PhyloViz (<a onclick=3D"displayHelp()" href=3D"javascript:void=
(0);">?</a>)</h2>
+ <div style=3D"display: none;">
+ <h2>Summary of Interactions and Functions:</h2>
+ <div class=3D"hint">1. Expansion of Nodes: click or option-cli=
ck to expand or collapse</div>
+ <div class=3D"hint">2. Zooming and translation: mousewheel, bu=
ttons, click and drag, double click. Reset</div>
+ <div class=3D"hint">3. Tooltip: Displays "Name and Size" on mo=
useOver on nodes</div>
+ <div class=3D"hint">4. Minimap: Currently displays an exact bu=
t scaled down replicate of the tree, orange bounding box is correct for lin=
ear only<br/>
+ Can be switched on or off</div>
+ <div class=3D"hint">5. Changing Layouts: Able to change betwee=
n circular and linear layouts.</div>
+
+ </div>
+
+ <h5>Scaling & Rotation:</h5>
+ <button id=3D"phylovizZoomInBtn" class=3D"" > + </button>
+ <button id=3D"phylovizZoomOutBtn" class=3D"" > - </button>
+
+
+ <h5>Translation:</h5>
+ <button id=3D"phylovizTranslateUpBtn" > Up </button>
+ <button id=3D"phylovizTranslateDownBtn" > Down </button>
+ <br/>
+ <button id=3D"phylovizTranslateLeftBtn" > Left </button>
+ <button id=3D"phylovizTranslateRightBtn" > Right </button>
+
+
+
+ <h5>Others:</h5>
+ <button id=3D"phylovizResetBtn" > Reset Zoom/Translate </button>
+ <button id=3D"phylovizSaveBtn" > Save vizualization </button>
+ <button id=3D"phylovizOpenSettingsBtn" > Settings </button>
+ </div>
+
+ <div id=3D"PhyloViz" >
+ </div>
+
+ <script type=3D"text/javascript">
+
+ function initPhyloViz(data, config) {
+ var phyloviz;
+
+ // -- Initialization code |-->
+ phyloviz =3D new PhylovizView({
+ data: data,
+ layout : "Linear",
+ config : config
+ });
+
+ // -- Render viz. --
+ phyloviz.render();
+
+ }
+
+ $(function firstVizLoad(){ // calls when viz is loaded for t=
he first time
+ var config =3D JSON.parse( '${ h.to_json_string( config )}');
+ var data =3D JSON.parse('${h.to_json_string(data)}');
+ initPhyloViz(data, config);
+ });
+
+ </script>
+
+</%def>
+
+
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/webapps/community/index.mako
--- a/templates/webapps/community/index.mako
+++ b/templates/webapps/community/index.mako
@@ -60,34 +60,41 @@
%endif
<div class=3D"toolSectionPad"></div><div class=3D"toolSect=
ionTitle">
- Repositories
+ All Repositories
</div>
- <div class=3D"toolSectionBody">
- <div class=3D"toolSectionBg">
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_for(=
controller=3D'repository', action=3D'browse_categories', webapp=3D'communi=
ty' )}">Browse by category</a>
- </div>
- %if trans.user:
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'browse_repositories', operation=
=3D'my_repositories', webapp=3D'community' )}">Browse my repositories</a>
- </div>
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'browse_invalid_tools', cntrller=
=3D'repository', webapp=3D'community' )}">Browse my invalid tools</a>
- </div>
- %endif
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( control=
ler=3D'repository', action=3D'browse_categories', webapp=3D'community' )}">=
Browse by category</a>
+ </div>
+ %if trans.user:
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ My Repositories and Tools
</div>
- </div>
- <div class=3D"toolSectionBody">
- <div class=3D"toolSectionBg">
- <div class=3D"toolTitle">
- %if trans.user:
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'create_repository', webapp=3D'com=
munity' )}">Create new repository</a>
- %else:
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'/user', action=3D'login', webapp=3D'community' )}">Login=
to create a repository</a>
- %endif
- </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_repositories', operation=3D'reposi=
tories_i_own', webapp=3D'community' )}">Repositories I own</a></div>
- </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_repositories', operation=3D'writab=
le_repositories', webapp=3D'community' )}">My writable repositories</a>
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_invalid_tools', cntrller=3D'reposi=
tory', webapp=3D'community' )}">My invalid tools</a>
+ </div>
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ Available Actions
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'create_repository', webapp=3D'community' =
)}">Create new repository</a>
+ </div>
+ %else:
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ Available Actions
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'/user', action=3D'login', webapp=3D'community' )}">Login to crea=
te a repository</a>
+ </div>
+ %endif
</div></div></div>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/1_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/1_nexus.nex
@@ -0,0 +1,87 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on June 12, 2012; 23:00 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Olariaga I., Grebenc T., Salcedo I., & Mart=C3=ADn M.P. 2012. Two new spec=
ies of Hydnum
+with ovoid basidiospores: H. ovoideisporum and H. vesterholtii. Mycologia,=
.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S128=
31]
+
+BEGIN TREES;
+ TITLE Hydnum_ITS_result;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Hydnum_aff_ellipsosporum_RUFHYD1_AJ535304,
+ 2 Hydnum_albidum_ALB_AY817135,
+ 3 Hydnum_albidum_ALBHYD1_AJ534974,
+ 4 Hydnum_albomagnum_ALM_DQ218305,
+ 5 Hydnum_ellipsosporum_ELL_AY817138,
+ 6 Hydnum_ellipsosporum_RUFHYD8_AJ547882,
+ 7 Hydnum_ovoidisporum_12317BIOFungi,
+ 8 Hydnum_ovoidisporum_12683BIOFungi,
+ 9 Hydnum_ovoidisporum_12902BIOFungi,
+ 10 Hydnum_ovoidisporum_14130BIOFungi,
+ 11 Hydnum_repandum_RE1_REP1_AJ889978,
+ 12 Hydnum_repandum_RE1_REP2_AJ889949,
+ 13 Hydnum_repandum_RE1_REP3_AY817136,
+ 14 Hydnum_repandum_RE1_REP6_UDB000025,
+ 15 Hydnum_repandum_RE1_REP7_UDB000096,
+ 16 Hydnum_repandum_RE1_REP8_UDB001479,
+ 17 Hydnum_repandum_RE1_REPHYD10_AJ547888,
+ 18 Hydnum_repandum_RE1_REPHYD11_AJ547886,
+ 19 Hydnum_repandum_RE1_REPHYD1_AJ547871,
+ 20 Hydnum_repandum_RE1_REPHYD3_AJ547874,
+ 21 Hydnum_repandum_RE1_REPHYD4_AJ547876,
+ 22 Hydnum_repandum_RE1_REPHYD5_AJ547875,
+ 23 Hydnum_repandum_RE1_REPHYD6_AJ547877,
+ 24 Hydnum_repandum_RE1_REPHYD7_AJ547878,
+ 25 Hydnum_repandum_RE1_REPHYD8_AJ547881,
+ 26 Hydnum_repandum_RE1_REPHYD9_AJ547883,
+ 27 Hydnum_repandum_RE1_RUFHYD10_AJ547866,
+ 28 Hydnum_repandum_RE1_RUFHYD11_AJ547889,
+ 29 Hydnum_repandum_RE1_RUFHYD9_AJ535305,
+ 30 Hydnum_rufescens_RU1_RUFHYD5_AJ547869,
+ 31 Hydnum_rufescens_RU1_RUFHYD6_AJ547884,
+ 32 Hydnum_rufescens_RU1_RUFHYD7_AJ547870,
+ 33 Hydnum_rufescens_RU2_REP5_DQ367902,
+ 34 Hydnum_rufescens_RU2_RUFHYD2_AJ535301,
+ 35 Hydnum_rufescens_RU3_12901BIOFungi,
+ 36 Hydnum_rufescens_RU3_REP4_DQ218306,
+ 37 Hydnum_rufescens_RU3_RUFHYD3_AJ535303,
+ 38 Hydnum_rufescens_RU3_RUFHYD4_AJ535302,
+ 39 Hydnum_rufescens_RU4_RUFHYD12_AJ839969,
+ 40 Hydnum_rufescens_RU4_RUFHYD16_AJ547868,
+ 41 Hydnum_rufescens_RU4_RUFHYD17_AJ547885,
+ 42 Hydnum_rufescens_RU4_UMB1_DQ367903,
+ 43 Hydnum_rufescens_RU5_12760BIOFungi,
+ 44 Hydnum_rufescens_RU5_ALBHYD2_AJ534975,
+ 45 Hydnum_rufescens_RU5_RUF2_DQ658890,
+ 46 Hydnum_rufescens_RU5_RUF4_UDB001465,
+ 47 Hydnum_rufescens_RU5_RUF5_UDB002423,
+ 48 Hydnum_rufescens_RU5_RUFHYD14_AJ547872,
+ 49 Hydnum_rufescens_RU6_RUF1_AY817137,
+ 50 Hydnum_rufescens_RU6_RUFHYD15_AJ547867,
+ 51 Hydnum_rufescens_wrong_taxonomy_RUF3_AM087246,
+ 52 Hydnum_umbilicatum_UMBHYD1_AJ534972,
+ 53 Hydnum_umbilicatum_UMBHYD2_AJ534973,
+ 54 Hydnum_vesterholtii_10429BIOFungi,
+ 55 Hydnum_vesterholtii_10452BIOFungi,
+ 56 Hydnum_vesterholtii_12330BIOFungi,
+ 57 Hydnum_vesterholtii_12904BIOFungi,
+ 58 Hydnum_vesterholtii_REPHYD12A_AJ547879,
+ 59 Hydnum_vesterholtii_REPHYD12C_AJ783968,
+ 60 Hydnum_vesterholtii_REPHYD13_AJ547887,
+ 61 Sistotrema_muscicola_AJ606040,
+ 62 Sistotrema_alboluteum_AJ606042;
+ TREE Fig._2 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(2:100.0,3:100.0):100.0):60.639999,(((56:100.0,58:100.0,59:100.0=
):84.639999,(54:100.0,55:100.0,57:100.0,60:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(11:100.0,12:100.0,13:100.0,14:100.0,15:100.0,1=
6:100.0,17:100.0,18:100.0,19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,24:1=
00.0,25:100.0,26:100.0):99.93):68.690002,(((33:100.0,34:100.0):49.8050005,(=
35:100.0,36:100.0,37:100.0,38:100.0):99.989998):49.8050005,((7:100.0,8:100.=
0,9:100.0,10:100.0):100.0,(42:100.0,(39:100.0,40:100.0,41:100.0):98.449997)=
:86.790001,((52:100.0,53:100.0):99.93,(1:100.0,(5:97.47999949999999,6:100.0=
):97.47999949999999):100.0):53.310001,(27:100.0,(28:100.0,29:100.0,49:100.0=
,50:100.0):47.404999):47.404999,(43:100.0,44:100.0,45:100.0,46:100.0,47:100=
.0,48:100.0):99.459999):29.245001):29.245001):51.580002):61.540001):49.66);
+ TREE PAUP_1 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(3:100.0,2:100.0):100.0):60.639999,(((58:100.0,59:100.0,56:100.0=
):84.639999,(60:100.0,54:100.0,55:100.0,57:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,2=
4:100.0,25:100.0,26:100.0,17:100.0,18:100.0,11:100.0,12:100.0,13:100.0,14:1=
00.0,15:100.0,16:100.0):99.93):68.690002,((34:100.0,33:100.0):99.610001,(37=
:100.0,38:100.0,35:100.0,36:100.0):99.989998,(42:100.0,(39:100.0,41:100.0,4=
0:100.0):98.449997):86.790001,(8:100.0,7:100.0,9:100.0,10:100.0):100.0,((52=
:100.0,53:100.0):99.93,(1:100.0,(5:100.0,6:100.0):94.959999):100.0):53.3100=
01,(29:100.0,27:100.0,28:100.0,50:100.0,49:100.0):94.809998,(44:100.0,43:10=
0.0,48:100.0,45:100.0,46:100.0,47:100.0):99.459999):58.490002):51.580002):6=
1.540001):49.66);
+
+
+
+END;
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/2_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/2_nexus.nex
@@ -0,0 +1,96 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phylogenetic knowledge. TreeBASE has been supported by the NSF, Harvard University, Yale University, SDSC and UC Davis. Please do not remove this acknowledgment from the Nexus file.
+
+
+Generated on August 18, 2012; 12:14 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Naish D., Dyke G., Cau A., & Escuillié F. 2012. A gigantic bird from the Upper Cretaceous
+of Central Asia. Biology Letters, 8(1): 97-100.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S13008]
+
+BEGIN TREES;
+ TITLE Imported_trees;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Herrerasaurus,
+ 2 Tawa,
+ 3 Allosaurus,
+ 4 Alvarezsaurus,
+ 5 Anchiornis,
+ 6 Archaeopteryx,
+ 7 Archaeorhynchus,
+ 8 Avimimus,
+ 9 Baryonyx,
+ 10 Beipiaosaurus,
+ 11 Caenagnathus,
+ 12 Caudipteryx,
+ 13 Ceratosaurus,
+ 14 Chirostenotes,
+ 15 Citipati,
+ 16 Compsognathus,
+ 17 Confuciusornis,
+ 18 Dilong,
+ 19 Dilophosaurus,
+ 20 Epidendrosaurus,
+ 21 Epidexipteryx,
+ 22 Erlicosaurus,
+ 23 Eustreptospondylus,
+ 24 Gallimimus,
+ 25 Garudimimus,
+ 26 Gobipteryx,
+ 27 Guanlong,
+ 28 Haplocheirus,
+ 29 Harpymimus,
+ 30 Hebeiornis,
+ 31 Hongshanornis,
+ 32 Huoshanornis,
+ 33 Iberomesornis,
+ 34 Ichthyornis,
+ 35 Incisivosaurus,
+ 36 Jeholornis,
+ 37 Limusaurus,
+ 38 Longicrusavis,
+ 39 Longipteryx,
+ 40 Longirostravis,
+ 41 Majungasaurus,
+ 42 Masiakasaurus,
+ 43 Monolophosaurus,
+ 44 Mononykus,
+ 45 Neornithes,
+ 46 Ornitholestes,
+ 47 Ornithomimus,
+ 48 Patagonykus,
+ 49 Patagopteryx,
+ 50 Pelecanimimus,
+ 51 Pengornis,
+ 52 Protarchaeopteryx,
+ 53 Protopteryx,
+ 54 Rinchenia,
+ 55 Sapeornis,
+ 56 Segnosaurus,
+ 57 Shenzhousaurus,
+ 58 Shuvuuia,
+ 59 Sinornithosaurus,
+ 60 Sinosauropteryx,
+ 61 Sinovenator,
+ 62 Sinraptor,
+ 63 Syntarsus_kayentakatae,
+ 64 Troodon,
+ 65 Tyrannosaurus,
+ 66 Velociraptor,
+ 67 Yanornis,
+ 68 Yixianornis,
+ 69 Zhongjianornis,
+ 70 Zhongornis,
+ 71 Zuolong,
+ 72 Samrukia;
+ TREE Figure_1A =3D [&R] (1,(2,(((((43,(3,62)),(71,((46,((((28,(4,(48=
,(44,58)))),((((5,(61,(64,(59,66)))),(6,((36,(55,(69,(((7,34,45,49,72,(31,3=
8),(67,68)),(33,((32,((26,30),(39,40))),(51,53)))),(17,70))))),(20,21)))),(=
(11,(12,(8,(14,(15,54))))),(35,52))),(10,(22,56)))),(50,(57,(29,(25,(24,47)=
))))),(16,60))),(27,(18,65))))),(9,23)),(13,(41,(37,42)))),(19,63))));
+
+
+
+END;
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/3_phyloxml.xml
--- /dev/null
+++ b/test-data/visualization/phyloviz/3_phyloxml.xml
@@ -0,0 +1,257 @@
+<?xml version=3D"1.0" encoding=3D"UTF-8"?>
+<phyloxml xmlns:xsi=3D"http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation=3D"http://www.phyloxml.org http://www.phyloxml.org/1=
.10/phyloxml.xsd"
+ xmlns=3D"http://www.phyloxml.org">
+ <phylogeny rooted=3D"true">
+ <clade>
+ <clade>
+ <branch_length>0.18105</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+ <branch_length>0.07466</branch_length>
+ <confidence type=3D"unknown">32.0</confidence>
+ <clade>
+ <branch_length>0.26168</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.22058</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+ <branch_length>0.28901</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.06584</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.02309</branch_length>
+ <confidence type=3D"unknown">43.0</confidenc=
e>
+ <clade>
+ <branch_length>0.0746</branch_length>
+ <confidence type=3D"unknown">100.0</confi=
dence>
+ <clade>
+ <branch_length>0.02365</branch_length>
+ <confidence type=3D"unknown">88.0</con=
fidence>
+ <clade>
+ <name>22_MOUSE</name>
+ <branch_length>0.05998</branch_leng=
th>
+ <taxonomy>
+ <code>MOUSE</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>Apaf-1_HUMAN</name>
+ <branch_length>0.01825</branch_leng=
th>
+ <taxonomy>
+ <code>HUMAN</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>12_CANFA</name>
+ <branch_length>0.04683</branch_length>
+ <taxonomy>
+ <code>CANFA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>11_CHICK</name>
+ <branch_length>0.15226</branch_length>
+ <taxonomy>
+ <code>CHICK</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>16_XENLA</name>
+ <branch_length>0.4409</branch_length>
+ <taxonomy>
+ <code>XENLA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.17031</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.10929</branch_length>
+ <confidence type=3D"unknown">100.0</confiden=
ce>
+ <clade>
+ <name>14_FUGRU</name>
+ <branch_length>0.02255</branch_length>
+ <taxonomy>
+ <code>FUGRU</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>15_TETNG</name>
+ <branch_length>0.09478</branch_length>
+ <taxonomy>
+ <code>TETNG</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>17_BRARE</name>
+ <branch_length>0.1811</branch_length>
+ <taxonomy>
+ <code>BRARE</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.01594</branch_length>
+ <confidence type=3D"unknown">53.0</confidence>
+ <clade>
+ <branch_length>0.10709</branch_length>
+ <confidence type=3D"unknown">68.0</confidence>
+ <clade>
+ <name>1_BRAFL</name>
+ <branch_length>0.26131</branch_length>
+ <taxonomy>
+ <code>BRAFL</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>18_NEMVE</name>
+ <branch_length>0.38014</branch_length>
+ <taxonomy>
+ <code>NEMVE</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>23_STRPU</name>
+ <branch_length>0.48179</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.34475</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>26_STRPU</name>
+ <branch_length>0.36374</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1319">
+ <domain from=3D"18" to=3D"98" confidence=3D"=
3.4E-5">Death</domain>
+ <domain from=3D"189" to=3D"481" confidence=
=3D"1.8E-10">NB-ARC</domain>
+ <domain from=3D"630" to=3D"668" confidence=
=3D"8.2E-5">WD40</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>25_STRPU</name>
+ <branch_length>0.33137</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1947">
+ <domain from=3D"143" to=3D"227" confidence=
=3D"7.4E-5">Death</domain>
+ <domain from=3D"227" to=3D"550" confidence=
=3D"2.0E-13">NB-ARC</domain>
+ <domain from=3D"697" to=3D"736" confidence=
=3D"7.9E-4">WD40</domain>
+ <domain from=3D"745" to=3D"785" confidence=
=3D"1.5">WD40</domain>
+ <domain from=3D"1741" to=3D"1836" confidence=
=3D"2.0">Adeno_VII</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>1.31498</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>CED4_CAEEL</name>
+ <branch_length>0.13241</branch_length>
+ <taxonomy>
+ <code>CAEEL</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"714">
+ <domain from=3D"7" to=3D"90" confidence=3D"9.2E=
-14">CARD</domain>
+ <domain from=3D"116" to=3D"442" confidence=3D"5=
.8E-151">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>31_CAEBR</name>
+ <branch_length>0.04777</branch_length>
+ <taxonomy>
+ <code>CAEBR</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"554">
+ <domain from=3D"1" to=3D"75" confidence=3D"0.00=
46">CARD</domain>
+ <domain from=3D"101" to=3D"427" confidence=3D"2=
.1E-123">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.13172</branch_length>
+ <confidence type=3D"unknown">45.0</confidence>
+ <clade>
+ <branch_length>0.24915</branch_length>
+ <confidence type=3D"unknown">95.0</confidence>
+ <clade>
+ <branch_length>0.76898</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>28_DROPS</name>
+ <branch_length>0.1732</branch_length>
+ <taxonomy>
+ <code>DROPS</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"535">
+ <domain from=3D"112" to=3D"399" confidence=
=3D"1.4E-5">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>Dark_DROME</name>
+ <branch_length>0.18863</branch_length>
+ <taxonomy>
+ <code>DROME</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1421">
+ <domain from=3D"108" to=3D"397" confidence=
=3D"2.1E-5">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ <clade>
+ <name>29_AEDAE</name>
+ <branch_length>0.86398</branch_length>
+ <taxonomy>
+ <code>AEDAE</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"423">
+ <domain from=3D"109" to=3D"421" confidence=3D"9=
.3E-6">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ <clade>
+ <name>30_TRICA</name>
+ <branch_length>0.97698</branch_length>
+ <taxonomy>
+ <code>TRICA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ </clade>
+ </phylogeny>
+</phyloxml>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/4_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/4_newick.nhx
@@ -0,0 +1,33 @@
+(((BGIOSIBCA028421_ORYSA:0.423485[&&NHX:S=3DORYSA:O=3DBGIOSIBCA028421.1:G=
=3DBGIOSIBCA028421],
+At5g41150_ARATH:0.273135[&&NHX:S=3DARATH:O=3DAt5g41150.1:G=3DAt5g41150]
+):0.690991[&&NHX:S=3DMagnoliophyta:D=3DN:B=3D100],
+(rad16_SCHPO:0.718598[&&NHX:S=3DSCHPO:O=3DSPCC970.01:G=3DSPCC970.01],
+RAD1_YEAST:1.05456[&&NHX:S=3DYEAST:O=3DYPL022W.1:G=3DYPL022W]
+):0.344838[&&NHX:S=3DAscomycota:D=3DN:B=3D100]
+):0.103849[&&NHX:S=3DEukaryota:D=3DN:B=3D61],
+((((((((ERCC4_HUMAN:0.067531[&&NHX:S=3DHUMAN:O=3DENST00000311895.3:G=3DENS=
G00000175595],
+Ercc4_MOUSE:0.17422[&&NHX:S=3DMOUSE:O=3DENSMUST00000023206.5:G=3DENSMUSG00=
000022545]
+):0.065513[&&NHX:S=3DEuarchontoglires:D=3DN:B=3D100],
+ENSMODT00000006086_MONDO:0.104633[&&NHX:S=3DMONDO:O=3DENSMODT00000006086.2=
:G=3DENSMODG00000004840]
+):0.083764[&&NHX:S=3DTheria:D=3DN:B=3D100],
+Q5ZJP8_CHICK:0.153132[&&NHX:S=3DCHICK:O=3DENSGALT00000004716.2:G=3DENSGALG=
00000002981]
+):0.057998[&&NHX:S=3DAmniota:D=3DN:B=3D100],
+ENSXETT00000024054_XENTR:0.288632[&&NHX:S=3DXENTR:O=3DENSXETT00000024054.2=
:G=3DENSXETG00000010991]
+):0.075713[&&NHX:S=3DTetrapoda:D=3DN:B=3D100],
+(zgc-63468_BRARE:0.2218[&&NHX:S=3DBRARE:O=3DENSDART00000015780.4:G=3DENSDA=
RG00000014161],
+NEWSINFRUT00000137921_FUGRU:0.220441[&&NHX:S=3DFUGRU:O=3DNEWSINFRUT0000013=
7921.3:G=3DNEWSINFRUG00000130312]
+):0.170605[&&NHX:S=3DClupeocephala:D=3DN:B=3D100]
+):0.238713[&&NHX:S=3DEuteleostomi:D=3DN:B=3D100],
+ENSCINT00000011737_CIOIN:0.623567[&&NHX:S=3DCIOIN:O=3DENSCINT00000011737.2=
:G=3DENSCING00000005673]
+):0.07499[&&NHX:S=3DChordata:D=3DN:B=3D100],
+(Sm00.scaff00195.0600_SCHMA:0.784609[&&NHX:S=3DSCHMA:O=3DSm00.scaff00195.0=
600:G=3DSm00.scaff00195.0600],
+(CBG03141_CAEBR:0.093703[&&NHX:S=3DCAEBR:O=3DCBG03141:G=3DCBG03141],
+NP_496498_CAEEL:0.212236[&&NHX:S=3DCAEEL:O=3DC47D12.8.1:G=3DC47D12.8]
+):1.47416[&&NHX:S=3DCaenorhabditis:D=3DN:B=3D94]
+):0.26906[&&NHX:S=3DBilateria:D=3DN:B=3D97]
+):0.071406[&&NHX:S=3DBilateria:D=3DN:B=3D1],
+(mei-9-RA_DROME:0.170289[&&NHX:S=3DDROME:O=3DCG3697-RA.3:G=3DCG3697],
+GA17620-PA_DROPS:0.154817[&&NHX:S=3DDROPS:O=3DGA17620-PA:G=3DGA17620]
+):0.818474[&&NHX:S=3DSophophora:D=3DN:B=3D100]
+):0
+)[&&NHX:S=3DEukaryota:D=3DN];
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/5_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/5_newick.nhx
@@ -0,0 +1,1 @@
+(CAE_ELE_PORCN:0.303421 ,((((DRO_PER_PORCN:0.001000 ,DRO_PSE_PORCN:0.00100=
0 )67:0.141994 ,(DRO_ANA_PORCN:0.111899 ,(DRO_ERE_PORCN:0.030516 ,(DRO_MEL_=
PORCN:0.021127 ,DRO_SEC_PORCN:0.021127 )38:0.030516 )35:0.111899 )18:0.1419=
94 )16:0.162611 ,(DRO_WIL_PORCN:0.152225 ,(DRO_VIR_PORCN:0.085057 ,DRO_MOJ_=
PORCN:0.085057 )24:0.152225 )15:0.162611 )13:0.295081 ,(ANO_GAM_PORCN:0.287=
545 ,((CIO_INT_PORCN:0.100686 ,CIO_SAV_PORCN:0.100686 )19:0.275542 ,((LOA_L=
OA_PORCN:0.036278 ,BRU_MAL_PORCN:0.036278 )29:0.272631 ,(((((DAN_RER_PORCN:=
0.086499 ,((TAK_RUB_PORCN:0.032609 ,TET_NIG_PORCN:0.032609 )32:0.048864 ,(G=
AD_MOR_PORCN:0.039387 ,(ORY_LAT_PORCN:0.031729 ,(GAS_ACU_PORCN:0.021882 ,OR=
E_NIL_PORCN:0.021882 )37:0.031729 )34:0.039387 )28:0.048864 )27:0.086499 )2=
3:0.119618 ,(LAT_CHA_PORCN:0.099348 ,((XEN_LAE_PORCN:0.033333 ,XEN_TRO_PORC=
N:0.033333 )31:0.091250 ,(ANO_CAR_PORCN:0.086538 ,((MON_DOM_PORCN:0.014100 =
,(MAC_EUG_PORCN:0.005423 ,SAR_HAR_PORCN:0.005423 )57:0.014100 )42:0.062862 =
,(ORN_ANA_PORCN:0.057974 ,(GOR_GOR_PORCN:0.033876 ,(FEL_CAT_PORCN:0.022851 =
,(PRO_CAP_PORCN:0.019716 ,(CAV_POR_PORCN:0.018599 ,(ERI_EUR_PORCN:0.015518 =
,((DIP_ORD_PORCN:0.007231 ,(MUS_MUS_PORCN:0.001085 ,(RAT_NOR_PORCN:0.001000=
,CRI_GRI_PORCN:0.001000 )69:0.001085 )64:0.007231 )53:0.012954 ,(DAS_NOV_P=
ORCN:0.011362 ,(LOX_AFR_PORCN:0.010575 ,(CAL_JAC_PORCN:0.010332 ,(OCH_PRI_P=
ORCN:0.010063 ,(MIC_MUR_PORCN:0.009123 ,(SUS_SCR_PORCN:0.008880 ,(MYO_LUC_P=
ORCN:0.008460 ,((CAN_FAM_PORCN:0.005423 ,AIL_MEL_PORCN:0.005423 )58:0.00809=
3 ,((PTE_VAM_PORCN:0.006508 ,BOS_TAU_PORCN:0.006508 )55:0.007494 ,((SPE_TRI=
_PORCN:0.003254 ,TUP_BEL_PORCN:0.003254 )61:0.006929 ,((OTO_GAR_PORCN:0.001=
085 ,(ORY_CUN_PORCN:0.001000 ,TUR_TRU_PORCN:0.001000 )68:0.001085 )65:0.005=
965 ,(EQU_CAB_PORCN:0.003688 ,(MAC_MUL_PORCN:0.002711 ,(PAN_TRO_PORCN:0.001=
446 ,(HOM_SAP_PORCN:0.001085 ,(PON_ABE_PORCN:0.001000 ,NOM_LEU_PORCN:0.0010=
00 )70:0.001085 )66:0.001446 )63:0.002711 )62:0.003688 )60:0.005965 )56:0.0=
06929 )54:0.007494 )52:0.008093 )51:0.008460 )50:0.008880 )49:0.009123 )48:=
0.010063 )47:0.010332 )46:0.010575 )45:0.011362 )44:0.012954 )43:0.015518 )=
41:0.018599 )40:0.019716 )39:0.022851 )36:0.033876 )30:0.057974 )26:0.06286=
2 )25:0.086538 )22:0.091250 )21:0.099348 )20:0.119618 )17:0.214465 ,(BRA_FL=
O_PORCN:0.189220 ,SAC_KOW_PORCN:0.189220 )12:0.214465 )11:0.257058 ,(NEM_VE=
C_PORCN:0.246631 ,AMP_QUE_PORCN:0.246631 )9:0.257058 )8:0.266904 ,(TRI_CAS_=
PORCN:0.259494 ,(PED_HUM_PORCN:0.227009 ,(NAS_VIT_PORCN:0.160241 ,(API_MEL_=
PORCN:0.031851 ,(BOM_TER_PORCN:0.004808 ,BOM_IMP_PORCN:0.004808 )59:0.03185=
1 )33:0.160241 )14:0.227009 )10:0.259494 )7:0.266904 )6:0.272631 )5:0.27554=
2 )4:0.287545 )3:0.295081 )2:0.303421 )1:0.0001;
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 tools/data_source/data_source.py
--- a/tools/data_source/data_source.py
+++ b/tools/data_source/data_source.py
@@ -4,6 +4,7 @@
import socket, urllib, sys, os
from galaxy import eggs #eggs needs to be imported so that galaxy.util can=
find docutils egg...
from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import get_charset_from_http_headers
import galaxy.model # need to import model before sniff to resolve a circu=
lar import dependency
from galaxy.datatypes import sniff
from galaxy.datatypes.registry import Registry
@@ -92,7 +93,7 @@
stop_err( 'The size of the data (%d bytes) you have reques=
ted exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, =
max_file_size ) )
#do sniff stream for multi_byte
try:
- cur_filename, is_multi_byte =3D sniff.stream_to_open_named_fil=
e( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename )
+ cur_filename, is_multi_byte =3D sniff.stream_to_open_named_fil=
e( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, s=
ource_encoding=3Dget_charset_from_http_headers( page.headers ) )
except Exception, e:
stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )
=20
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -90,7 +90,8 @@
=20
if dataset.type =3D=3D 'url':
try:
- temp_name, dataset.is_multi_byte =3D sniff.stream_to_file( url=
lib.urlopen( dataset.path ), prefix=3D'url_paste' )
+ page =3D urllib.urlopen( dataset.path ) #page will be .close()=
ed by sniff methods
+ temp_name, dataset.is_multi_byte =3D sniff.stream_to_file( pag=
e, prefix=3D'url_paste', source_encoding=3Dutil.get_charset_from_http_heade=
rs( page.headers ) )
except Exception, e:
file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) =
), dataset, json_file )
return
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Made download status page more informative, fixed intermittent indexing error.
by Bitbucket 29 Aug '12
by Bitbucket 29 Aug '12
29 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8a8a37bbdc2f/
changeset: 8a8a37bbdc2f
user: inithello
date: 2012-08-29 16:16:11
summary: Made download status page more informative, fixed intermittent indexing error.
affected #: 2 files
diff -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 -r 8a8a37bbdc2fc233446966793b6545d553aa50d8 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -54,6 +54,7 @@
self._log( self.locations )
self._log( 'Indexer %s completed successfully.' % indexer )
self._flush_files()
+ exit(0)
def _check_link( self ):
self._log( 'Checking symlink to %s' % self.fafile )
diff -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 -r 8a8a37bbdc2fc233446966793b6545d553aa50d8 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -148,7 +148,8 @@
dbkey = build[0]
longname = build[1]
break
- assert dbkey is not '?', 'That build was not found'
+ if dbkey == '?':
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid build was specified.' )
ftp = ftplib.FTP('hgdownload.cse.ucsc.edu')
ftp.login('anonymous', trans.get_user().email)
checker = []
@@ -189,7 +190,8 @@
dbkeys=trans.ucsc_builds )
elif source == 'Ensembl':
dbkey = params.get( 'ensembl_dbkey', None )
- assert dbkey is not '?', 'That build was not found'
+ if dbkey == '?':
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid build was specified.' )
for build in trans.ensembl_builds:
if build[ 'dbkey' ] == dbkey:
dbkey = build[ 'dbkey' ]
@@ -199,7 +201,7 @@
break
url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
else:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Somehow an invalid data source was specified.' )
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid data source was specified.' )
if url is None:
return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Unable to generate a valid URL with the specified parameters.' )
params = dict( protocol='http', name=dbkey, datatype='fasta', url=url, user=trans.user.id )
@@ -248,7 +250,8 @@
sa = trans.app.model.context.current
if jobtype == 'liftover':
job = sa.query( model.TransferJob ).filter_by( id=jobid ).first()
- joblabel = 'Download liftOver'
+ liftover = trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].get_job_status( jobid )
+ joblabel = 'Download liftOver (%s to %s)' % ( liftover.params[ 'from_genome' ], liftover.params[ 'to_genome' ] )
elif jobtype == 'transfer':
job = sa.query( model.TransferJob ).filter_by( id=jobid ).first()
joblabel = 'Download Genome'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/30c7f58f116d/
changeset: 30c7f58f116d
user: dan
date: 2012-08-29 16:03:42
summary: Unicode fixes for annotations
affected #: 2 files
diff -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 lib/galaxy/model/item_attrs.py
--- a/lib/galaxy/model/item_attrs.py
+++ b/lib/galaxy/model/item_attrs.py
@@ -95,7 +95,7 @@
""" Returns a user's annotation string for an item. """
annotation_obj = self.get_item_annotation_obj( db_session, user, item )
if annotation_obj:
- return annotation_obj.annotation
+ return galaxy.util.unicodify( annotation_obj.annotation )
return None
def get_item_annotation_obj( self, db_session, user, item ):
diff -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -644,7 +644,10 @@
dataset = self.get_dataset( trans, id, False, True )
if not dataset:
web.httpexceptions.HTTPNotFound()
- return self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+ annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+ if annotation and isinstance( annotation, unicode ):
+ annotation = annotation.encode( 'ascii', 'replace' ) #paste needs ascii here
+ return annotation
@web.expose
def display_at( self, trans, dataset_id, filename=None, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1aee30671c45/
changeset: 1aee30671c45
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in data source tools. Add util.is_binary() method that returns true when provided string contains a null byte.
affected #: 3 files
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -6,6 +6,7 @@
from galaxy import util
from galaxy.datatypes.checkers import *
from galaxy.datatypes.binary import unsniffable_binary_formats
+from encodings import search_function as encodings_search_function
log = logging.getLogger(__name__)
@@ -15,7 +16,7 @@
full_path = os.path.join(path, 'test', fname)
return full_path
-def stream_to_open_named_file( stream, fd, filename ):
+def stream_to_open_named_file( stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict' ):
"""Writes a stream to the provided file descriptor, returns the file's name and bool( is_multi_byte ). Closes file descriptor"""
#signature and behavor is somewhat odd, due to backwards compatibility, but this can/should be done better
CHUNK_SIZE = 1048576
@@ -23,6 +24,10 @@
is_compressed = False
is_binary = False
is_multi_byte = False
+ if not target_encoding or not encodings_search_function( target_encoding ):
+ target_encoding = util.DEFAULT_ENCODING #utf-8
+ if not source_encoding:
+ source_encoding = util.DEFAULT_ENCODING #sys.getdefaultencoding() would mimic old behavior (defaults to ascii)
while 1:
chunk = stream.read( CHUNK_SIZE )
if not chunk:
@@ -42,13 +47,12 @@
chars = chunk[:100]
is_multi_byte = util.is_multi_byte( chars )
if not is_multi_byte:
- for char in chars:
- if ord( char ) > 128:
- is_binary = True
- break
+ is_binary = util.is_binary( chunk )
data_checked = True
if not is_compressed and not is_binary:
- os.write( fd, chunk.encode( "utf-8" ) )
+ if not isinstance( chunk, unicode ):
+ chunk = chunk.decode( source_encoding, source_error )
+ os.write( fd, chunk.encode( target_encoding, target_error ) )
else:
# Compressed files must be encoded after they are uncompressed in the upload utility,
# while binary files should not be encoded at all.
@@ -56,10 +60,10 @@
os.close( fd )
return filename, is_multi_byte
-def stream_to_file( stream, suffix='', prefix='', dir=None, text=False ):
+def stream_to_file( stream, suffix='', prefix='', dir=None, text=False, **kwd ):
"""Writes a stream to a temporary file, returns the temporary file's name"""
fd, temp_name = tempfile.mkstemp( suffix=suffix, prefix=prefix, dir=dir, text=text )
- return stream_to_open_named_file( stream, fd, temp_name )
+ return stream_to_open_named_file( stream, fd, temp_name, **kwd )
def check_newlines( fname, bytes_to_read=52428800 ):
"""
@@ -305,14 +309,9 @@
else:
for hdr in headers:
for char in hdr:
- if len( char ) > 1:
- for c in char:
- if ord( c ) > 128:
- is_binary = True
- break
- elif ord( char ) > 128:
- is_binary = True
- break
+ #old behavior had 'char' possibly having length > 1,
+ #need to determine when/if this occurs
+ is_binary = util.is_binary( char )
if is_binary:
break
if is_binary:
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -34,6 +34,9 @@
gzip_magic = '\037\213'
bz2_magic = 'BZh'
+DEFAULT_ENCODING = 'utf-8'
+NULL_CHAR = '\000'
+BINARY_CHARS = [ NULL_CHAR ]
from inflection import Inflector, English
inflector = Inflector(English)
@@ -57,6 +60,32 @@
return True
return False
+def is_binary( value, binary_chars=None ):
+ """
+ File is binary if it contains a null-byte by default (e.g. behavior of grep, etc.).
+ This may fail for utf-16 files, but so would ASCII encoding.
+ >>> is_binary( string.printable )
+ False
+ >>> is_binary( '\\xce\\x94' )
+ False
+ >>> is_binary( '\\000' )
+ True
+ """
+ if binary_chars is None:
+ binary_chars = BINARY_CHARS
+ for binary_char in binary_chars:
+ if binary_char in value:
+ return True
+ return False
+
+def get_charset_from_http_headers( headers, default=None ):
+ rval = headers.get('content-type', None )
+ if rval and 'charset=' in rval:
+ rval = rval.split('charset=')[-1].split(';')[0].strip()
+ if rval:
+ return rval
+ return default
+
def synchronized(func):
"""This wrapper will serialize access to 'func' to a single thread. Use it as a decorator."""
def caller(*params, **kparams):
@@ -333,6 +362,17 @@
else:
return amount[0:sfs] + '0'*(len(amount) - sfs)
+def unicodify( value, encoding=DEFAULT_ENCODING, error='replace', default=None ):
+ """
+ Returns a unicode string or None
+ """
+ if isinstance( value, unicode ):
+ return value
+ try:
+ return unicode( value, encoding, error )
+ except:
+ return default
+
def object_to_string( obj ):
return binascii.hexlify( pickle.dumps( obj, 2 ) )
@@ -502,7 +542,7 @@
def recursively_stringify_dictionary_keys( d ):
if isinstance(d, dict):
- return dict([(k.encode('utf-8'), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
+ return dict([(k.encode( DEFAULT_ENCODING ), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
elif isinstance(d, list):
return [recursively_stringify_dictionary_keys(x) for x in d]
else:
@@ -622,7 +662,7 @@
Sends an email.
"""
to = listify( to )
- msg = MIMEText( body )
+ msg = MIMEText( body.encode( 'ascii', 'replace' ) )
msg[ 'To' ] = ', '.join( to )
msg[ 'From' ] = frm
msg[ 'Subject' ] = subject
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d tools/data_source/data_source.py
--- a/tools/data_source/data_source.py
+++ b/tools/data_source/data_source.py
@@ -4,6 +4,7 @@
import socket, urllib, sys, os
from galaxy import eggs #eggs needs to be imported so that galaxy.util can find docutils egg...
from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import get_charset_from_http_headers
import galaxy.model # need to import model before sniff to resolve a circular import dependency
from galaxy.datatypes import sniff
from galaxy.datatypes.registry import Registry
@@ -92,7 +93,7 @@
stop_err( 'The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, max_file_size ) )
#do sniff stream for multi_byte
try:
- cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename )
+ cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
except Exception, e:
stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )
https://bitbucket.org/galaxy/galaxy-central/changeset/577498958c37/
changeset: 577498958c37
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in upload tool. Significant pre-existing refactoring still needed.
affected #: 1 file
diff -r 1aee30671c45ecf045e33fe472525b814b97838d -r 577498958c37cb3c0702b06a048bb51b2d82257a tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -90,7 +90,8 @@
if dataset.type == 'url':
try:
- temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ page = urllib.urlopen( dataset.path ) #page will be .close()ed by sniff methods
+ temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
except Exception, e:
file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
return
https://bitbucket.org/galaxy/galaxy-central/changeset/fca4e12478d6/
changeset: fca4e12478d6
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in dataset edit attributes.
affected #: 1 file
diff -r 577498958c37cb3c0702b06a048bb51b2d82257a -r fca4e12478d62162cb486422c79687587209c639 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako
+++ b/templates/dataset/edit_attributes.mako
@@ -58,7 +58,7 @@
Info:
</label><div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="info" cols="40" rows="2">${data.info | h}</textarea>
+ <textarea name="info" cols="40" rows="2">${ util.unicodify( data.info ) | h}</textarea></div><div style="clear: both"></div></div>
https://bitbucket.org/galaxy/galaxy-central/changeset/efefe08d6dd0/
changeset: efefe08d6dd0
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in dataset error page.
affected #: 1 file
diff -r fca4e12478d62162cb486422c79687587209c639 -r efefe08d6dd0c70055a80953007e312e1d85d271 templates/dataset/errors.mako
--- a/templates/dataset/errors.mako
+++ b/templates/dataset/errors.mako
@@ -24,21 +24,21 @@
<% job = hda.creating_job_associations[0].job %>
%if job.traceback:
The Galaxy framework encountered the following error while attempting to run the tool:
- <pre>${job.traceback | h}</pre>
+ <pre>${ util.unicodify( job.traceback ) | h}</pre>
%endif
%if job.stderr or job.info:
Tool execution generated the following error message:
%if job.stderr:
- <pre>${job.stderr | h}</pre>
+ <pre>${ util.unicodify( job.stderr ) | h}</pre>
%elif job.info:
- <pre>${job.info | h}</pre>
+ <pre>${ util.unicodify( job.info ) | h}</pre>
%endif
%else:
Tool execution did not generate any error messages.
%endif
%if job.stdout:
The tool produced the following additional output:
- <pre>${job.stdout | h}</pre>
+ <pre>${ util.unicodify( job.stdout ) | h}</pre>
%endif
%else:
The tool did not create any additional job / error info.
https://bitbucket.org/galaxy/galaxy-central/changeset/80dd03582ea9/
changeset: 80dd03582ea9
user: dan
date: 2012-08-29 01:23:40
summary: Handle non-ascii unicode in dataset error report emails.
affected #: 1 file
diff -r efefe08d6dd0c70055a80953007e312e1d85d271 -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -203,12 +203,12 @@
job_id=job.id,
job_tool_id=job.tool_id,
job_command_line=job.command_line,
- job_stderr=job.stderr,
- job_stdout=job.stdout,
- job_info=job.info,
- job_traceback=job.traceback,
+ job_stderr=util.unicodify( job.stderr ),
+ job_stdout=util.unicodify( job.stdout ),
+ job_info=util.unicodify( job.info ),
+ job_traceback=util.unicodify( job.traceback ),
email=email,
- message=message )
+ message=util.unicodify( message ) )
frm = to_address
# Check email a bit
email = email.strip()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix handling of ToolSectionLabel objects in the tool panel.
by Bitbucket 28 Aug '12
by Bitbucket 28 Aug '12
28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/738b703f725c/
changeset: 738b703f725c
user: greg
date: 2012-08-28 20:32:04
summary: Fix handling of ToolSectionLabel objects in the tool panel.
affected #: 1 file
diff -r 483cbfc5341a0331cc86185f8821cdecec52a0b4 -r 738b703f725c1ade02018489e1c8e197236e30cd lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -187,7 +187,9 @@
section.elems[ section_key ] = workflow
log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
elif section_key.startswith( 'label_' ):
- section.elems[ section_key ] = section_val
+ if section_val:
+ section.elems[ section_key ] = section_val
+ log.debug( "Loaded label: %s" % ( section_val.text ) )
self.tool_panel[ key ] = section
def load_integrated_tool_panel_keys( self ):
"""
@@ -215,12 +217,12 @@
section.elems[ key ] = None
elif section_elem.tag == 'label':
key = 'label_%s' % section_elem.get( 'id' )
- section.elems[ key ] = ToolSectionLabel( section_elem )
+ section.elems[ key ] = None
key = 'section_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] = section
elif elem.tag == 'label':
key = 'label_%s' % elem.get( 'id' )
- self.integrated_tool_panel[ key ] = ToolSectionLabel( elem )
+ self.integrated_tool_panel[ key ] = None
def write_integrated_tool_panel_config_file( self ):
"""
Write the current in-memory version of the integrated_tool_panel.xml file to disk. Since Galaxy administrators
@@ -254,10 +256,11 @@
if section_item:
os.write( fd, ' <workflow id="%s" />\n' % section_item.id )
elif section_key.startswith( 'label_' ):
- label_id = section_item.id or ''
- label_text = section_item.text or ''
- label_version = section_item.version or ''
- os.write( fd, ' <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
+ if section_item:
+ label_id = section_item.id or ''
+ label_text = section_item.text or ''
+ label_version = section_item.version or ''
+ os.write( fd, ' <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
os.write( fd, ' </section>\n' )
os.write( fd, '</toolbox>\n' )
os.close( fd )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0